PEGASUS source archive listing. (The original section is a raw tar stream; the tar headers and compiled-class bytes below are reduced to file names plus whatever text survives. File modes come from the tar headers.)

PEGASUS/build_pegasus.sh  (0755)
    ant -f build_pegasus.xml makejar

PEGASUS/build_pegasus.xml  (0644)
    Ant build file. Its XML markup did not survive extraction; the only recoverable text is the project description "PEGASUS: A Peta-Scale Graph Mining System".

PEGASUS/catepillar_star.edge  (0644)
    Sample graph (a caterpillar/star shape) in PEGASUS edge-list format, one source/destination pair per line:
        0 1
        1 2
        1 3
        3 4
        3 6
        5 6
        6 7
        6 8
        6 9
        10 11
        10 12
        10 13
        10 14
        10 15

PEGASUS/catepillar_star_rwr.query  (0644)
    Query file for Random Walk with Restart on the sample graph. The recovered tokens are "6 1" and "3 2.0"; judging by the query-file parser in PegasusUtils.loadQueryNodeInfo (listed further below), each line is a (query node id, weight) pair.
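Before the class listing, a note on how these text formats are consumed. The shipped source is present here only as bytecode, so the following is a minimal sketch, modeled on the strings visible in DegDist$MapPass1.class later in this listing (class and variable names are mine, not the original's): a mapper that reads one edge per line and emits (source node, 1) pairs for an out-degree count.

    import java.io.IOException;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.Mapper;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reporter;

    // Sketch: emit (src, 1) for every edge line, skipping '#' comments.
    // PEGASUS edge files are tab-separated; the sample above may render
    // its tabs as spaces, so this accepts either.
    public class EdgeDegreeMapper extends MapReduceBase
            implements Mapper<LongWritable, Text, IntWritable, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);

        public void map(LongWritable key, Text value,
                        OutputCollector<IntWritable, IntWritable> output,
                        Reporter reporter) throws IOException {
            String line = value.toString();
            if (line.startsWith("#"))
                return;                          // comment line
            String[] tok = line.split("[\t ]+"); // "src <sep> dst"
            output.collect(new IntWritable(Integer.parseInt(tok[0])), ONE);
        }
    }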
PEGASUS/classes/  and  PEGASUS/classes/pegasus/  (directories)
    Compiled Hadoop MapReduce classes. The raw JVM bytecode is omitted from here on; each entry keeps the class name plus what its constant-pool strings reveal (field names, configure()-time log messages, and the Hadoop types it works with).

PEGASUS/classes/pegasus/PegasusUtils$MapHistogram.class
    Mapper<LongWritable, Text, IntWritable, Text>; configure() logs "MapHistogram configure(): number_nodes = ..., nreducers=...", and map() buckets first-column node ids for a histogram.

PEGASUS/classes/pegasus/DegDist$RedPass2.class
    Reducer<IntWritable, IntWritable, IntWritable, IntWritable>; sums the grouped counts per key (see the reconstruction below).

PEGASUS/classes/pegasus/Saxpy$MapStage1.class
    Mapper for the saxpy operation (y <- y + a*x); fields from_node_int, isYpath, isXpath, a; configure() logs "Saxpy.MapStage1: map.input.file = ..., isYpath=..., isXpath=..., a=..." and tags each record by whether it came from the x or the y vector path.

PEGASUS/classes/pegasus/RWRBlock$RedStage2.class
    Reducer for block Random Walk with Restart; fields block_width, mixing_c, number_nodes, query_nodeid, query_blockrow, query_blockind; configure() logs "RedStage2 : block_width=..., query_nodeid=...". Merges partial block vectors and, as the mixing_c/query_block* fields suggest, handles the restart mass at the query node's block position.
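The bytecode of DegDist$RedPass2 above is simple enough that its full logic is visible in the constant pool: iterate the grouped counts and emit their sum. A hand-decompiled reconstruction (so treat local names as approximate; in the original this is a static inner class of pegasus.DegDist):

    import java.io.IOException;
    import java.util.Iterator;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reducer;
    import org.apache.hadoop.mapred.Reporter;

    // Reconstructed from DegDist$RedPass2.class: sum partial counts per key.
    public class RedPass2 extends MapReduceBase
            implements Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {
        public void reduce(IntWritable key, Iterator<IntWritable> values,
                           OutputCollector<IntWritable, IntWritable> output,
                           Reporter reporter) throws IOException {
            int count = 0;
            while (values.hasNext())
                count += values.next().get();  // "cur_count" in the bytecode
            output.collect(key, new IntWritable(count));
        }
    }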
PEGASUS/classes/pegasus/ResultInfo.class
    Plain data holder compiled from ConCmpt.java: two public int fields, changed and unchanged (reconstructed exactly below).

PEGASUS/classes/pegasus/PagerankBlock$MapStage25.class
    Mapper; configure() logs "MapStage25: block_width = ..."; splits block-formatted PageRank vector lines into per-element records keyed by block id.

PEGASUS/classes/pegasus/HadiBlock$RedStage3.class
    Reducer<IntWritable, Text, ...> for HADI (diameter estimation); accumulates per-iteration statistics (nh_sum, converged_sum, changed_sum) and emits them tab-separated.
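ResultInfo.class above is small enough to reconstruct exactly; the constant pool shows only the two public int fields and a default constructor. The field meanings in the comments are an interpretation suggested by the names:

    package pegasus;

    // Reconstructed from ResultInfo.class (SourceFile: ConCmpt.java):
    // a plain holder for per-iteration convergence counts in the
    // connected-components computation.
    class ResultInfo {
        public int changed;    // nodes whose component id changed this pass
        public int unchanged;  // nodes whose component id stayed the same
    }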
PEGASUS/classes/pegasus/HadiBlock$CombinerStage2.class
    Combiner for block HADI; fields nreplication, encode_bitmask, cur_radius, block_width; configure() logs "CombinerStage2: block_width = ..., nreplication = ..., encode_bitmask = ..., cur_radius = ...". Merges the Flajolet-Martin bitstrings of partial neighborhood vectors via GIMV.updateHADIBitString and makeHADICombinerBitString (a merge of this kind is sketched below).

PEGASUS/classes/pegasus/PagerankNaive$RedStage3.class
    Reducer; configure() computes random_coeff = (1 - mixing_c) / number_nodes and logs it (the log string says "RedStage2", an apparent copy-paste left in the original source); reduce() tracks the minimum and maximum PageRank values for convergence reporting.

PEGASUS/classes/pegasus/DegDist$MapPass1.class
    Mapper for degree distribution, pass 1; configure() logs "MapPass1 : configure is called. degtype = ..."; skips "#" comment lines, splits each edge on tab, and emits (node, 1) for the in-, out-, or in/out-degree depending on deg_type (cf. the edge-mapper sketch near the top of this listing).
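The exact bitstring encoding HadiBlock$CombinerStage2 passes to the pegasus.GIMV helpers is not recoverable from the dump, but the operation HADI's combiner performs is the standard Flajolet-Martin union: the sketch of the union of two neighborhoods is the bitwise OR of their replicated bitmasks. An illustration of just that step, on plain long[] masks (not the project's encoded string format):

    // Illustration only: merging Flajolet-Martin sketches the way HADI's
    // combiner does. Each node carries `nreplication` bitmasks; the union
    // of two neighborhoods is the bitwise OR of corresponding masks.
    public final class FmMerge {
        static long[] merge(long[] a, long[] b) {
            long[] out = new long[a.length];
            for (int i = 0; i < a.length; i++)
                out[i] = a[i] | b[i];  // union of the two sketched sets
            return out;
        }

        public static void main(String[] args) {
            long[] x = {0b1011L, 0b0001L};
            long[] y = {0b0100L, 0b1001L};
            for (long v : merge(x, y))
                System.out.println(Long.toBinaryString(v));
            // prints 1111 and 1001: each replica is the OR of the inputs
        }
    }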
PEGASUS/classes/pegasus/PegasusUtils$RedSumDoubleTextKey.class
    Reducer<Text, DoubleWritable, Text, DoubleWritable>; sums the grouped doubles per key (reconstructed below).

PEGASUS/classes/pegasus/RWRNaive$RedStage4.class
    Reducer<IntWritable, IntWritable, IntWritable, IntWritable>; the same integer-sum pattern as DegDist$RedPass2 above.

PEGASUS/classes/pegasus/ConCmptIVGen$MapStage1.class
    Mapper; skips "#" comment lines and re-emits each input line keyed by its first column, seeding the initial component-id vector for connected components.

PEGASUS/classes/pegasus/RWRBlock$MapStage2.class
    Mapper; re-keys each vector line by its first column (block id).

PEGASUS/classes/pegasus/PagerankInitVector.class
    Driver (implements Tool; has main/run/printUsage). Usage string: "PagerankInitVector <# of nodes> <# of reducers>". Prints the PEGASUS banner, writes a local pagerank.initial_rank.temp file assigning each node its initial rank, copies it into HDFS under pr_input, and runs job "PagerankInitVector_Stage1" with its MapStage1/RedStage1 inner classes.
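RedSumDoubleTextKey is another one-liner whose behavior is unambiguous from the bytecode: accumulate a double sum per Text key. A hand-reconstruction (in the original it is a static inner class of pegasus.PegasusUtils):

    import java.io.IOException;
    import java.util.Iterator;

    import org.apache.hadoop.io.DoubleWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reducer;
    import org.apache.hadoop.mapred.Reporter;

    // Reconstructed from PegasusUtils$RedSumDoubleTextKey.class.
    public class RedSumDoubleTextKey extends MapReduceBase
            implements Reducer<Text, DoubleWritable, Text, DoubleWritable> {
        public void reduce(Text key, Iterator<DoubleWritable> values,
                           OutputCollector<Text, DoubleWritable> output,
                           Reporter reporter) throws IOException {
            double sum = 0;
            while (values.hasNext())
                sum += values.next().get();
            output.collect(key, new DoubleWritable(sum));
        }
    }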
PEGASUS/classes/pegasus/ConCmptBlock$CombinerStage2.class
    Combiner for block connected components; keeps the minimum candidate component id per node (variable cur_min_nodeid), passes "ms"/"mo"-prefixed block records through, and logs "Exception! cur_ci_string=[...]" on malformed input (the min-keeping idea is sketched below).

PEGASUS/classes/pegasus/PagerankInitVector$MapStage1.class
    Mapper for the PagerankInitVector job above; skips "#" comments and re-emits each line keyed by its first column.
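ConCmptBlock$CombinerStage2's record framing is not fully recoverable, but the operation it performs is the GIM-V connected-components primitive: keep the smallest component id seen for each node. A stripped-down version of that combine step (simplified to bare IntWritable values; the real class works on prefixed Text records):

    import java.io.IOException;
    import java.util.Iterator;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reducer;
    import org.apache.hadoop.mapred.Reporter;

    // Simplified combiner: for each node, keep only the minimum candidate
    // component id seen so far. Repeated until no id changes, this
    // propagates the smallest node id through each connected component.
    public class MinComponentCombiner extends MapReduceBase
            implements Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {
        public void reduce(IntWritable key, Iterator<IntWritable> values,
                           OutputCollector<IntWritable, IntWritable> output,
                           Reporter reporter) throws IOException {
            int min = Integer.MAX_VALUE;
            while (values.hasNext())
                min = Math.min(min, values.next().get());
            output.collect(key, new IntWritable(min));
        }
    }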
PEGASUS/classes/pegasus/ScalarMult$MapStage1Double.class
    Mapper<LongWritable, Text, IntWritable, DoubleWritable>; configure() reads the scalar and logs "ScalarMult.MapStage1: s = ..."; map() multiplies each vector element by s (sketched below).

PEGASUS/classes/pegasus/NormalizeVector.class
    Driver (implements Tool). Usage string: "NormalizeVector <input_path> <output_path> <# of reducers>"; the angle-bracketed parameters were eaten by extraction, so the first two argument names are inferred from the input_path/output_path fields, and a local additional_multiplier suggests an optional extra scale factor. Runs a two-job pipeline: pegasus.L1norm to compute the vector's L1 norm (logging "L1norm = ..."), then pegasus.ScalarMult with multiplier 1/norm into smult_output, which is renamed to the output path; finishes with "[PEGASUS] Normalization completed. The normalized vecotr is saved in HDFS ..." (typo present in the original string constant).
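The per-element work in ScalarMult$MapStage1Double reduces to one multiplication. A sketch of that map step: the JobConf key name "s" is an assumption, and the 'v' element prefix is inferred from the charAt/substring calls in the bytecode and the usual PEGASUS vector-line format:

    import java.io.IOException;

    import org.apache.hadoop.io.DoubleWritable;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.Mapper;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reporter;

    // Sketch of ScalarMult$MapStage1Double: scale every vector element by s.
    public class MapStage1Double extends MapReduceBase
            implements Mapper<LongWritable, Text, IntWritable, DoubleWritable> {
        private double s = 1.0;

        public void configure(JobConf job) {
            s = Double.parseDouble(job.get("s")); // key name "s" is an assumption
            System.out.println("ScalarMult.MapStage1: s = " + s);
        }

        public void map(LongWritable key, Text value,
                        OutputCollector<IntWritable, DoubleWritable> output,
                        Reporter reporter) throws IOException {
            String line = value.toString();
            int tabpos = line.indexOf("\t");
            int nodeId = Integer.parseInt(line.substring(0, tabpos));
            String valStr = line.substring(tabpos + 1);
            if (valStr.charAt(0) == 'v')          // inferred 'v' element prefix
                valStr = valStr.substring(1);
            output.collect(new IntWritable(nodeId),
                           new DoubleWritable(s * Double.parseDouble(valStr)));
        }
    }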
PEGASUS/classes/pegasus/Hadi$MapStage1.class
    Mapper; configure() logs "MapStage1: make_symmetric = ..."; skips "#" comments and emits each edge, adding the reversed edge when make_symmetric is set.

PEGASUS/classes/pegasus/PegasusUtils.class
    Utility class. Inner classes (all small mappers/reducers/partitioners): IdentityPartition, RangePartitionS1, RangePartition, RedAvgDouble, RedSumDoubleLongKey, RedSumDoubleTextKey, RedSumLongText, RedSumDouble, RedHistogram, RedIdentityGen, RedIdentity, MapSwapDouble, MapHistogramText, MapHistogram, MapIdentityDouble, MapIdentityLongText, MapIdentity. Static helpers visible in the bytecode: open_log_file, get_cur_datetime ("yyyy-MM-dd HH:mm:ss", GMT), format_duration, min2/max2, MatvecNaive (drives pegasus.matvec.MatvecNaive via temp_mv, logging "Running Matvecnaive: mat_path=..., vec_path=..." and "Done Matvecnaive. Output is saved in HDFS ..."), copyToLocalFile, readLocaldirOneline (reads a part-00000 file), loadQueryNodeInfo (parses (node, weight) query lines, logging "loadQueryNodeInfo: total N queries read."), and read_l1norm_result (reads the l1norm job output).

PEGASUS/classes/pegasus/PagerankBlock$MapStage1.class
    Mapper; skips "#" comments and re-emits matrix/vector lines keyed by their first column (block row).

PEGASUS/classes/pegasus/PagerankBlock$PrCounters.class
    enum PrCounters { CONVERGE_CHECK }: a Hadoop counter enum used for the convergence check (reconstructed below).
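PrCounters is fully recoverable from the bytecode, and PagerankNaive carries an identical copy (listed at the end of this archive). Presumably the reducers increment it through Reporter.incrCounter during the convergence check, though that call site is not visible in this class itself:

    package pegasus;

    // Reconstructed from PagerankBlock$PrCounters.class: a one-constant
    // Hadoop counter enum.
    enum PrCounters {
        CONVERGE_CHECK
    }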
& @PEGASUS/classes/pegasus/SaxpyTextoutput$MapStage1.class0000644000000000000000000000772511443145620022032 0ustar rootroot2 *ij i )k )l )m@ noB1 pq )rs tu vwx iy z{ |}~   ' t t t t  #  ) from_node_int"Lorg/apache/hadoop/io/IntWritable;isYpathZisXpathaD()VCodeLineNumberTableLocalVariableTablethis MapStage1 InnerClasses#Lpegasus/SaxpyTextoutput$MapStage1; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;y_pathLjava/lang/String;x_path input_file StackMapTablemap(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vkey#Lorg/apache/hadoop/io/LongWritable;valueLorg/apache/hadoop/io/Text;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter; line_texttabposIout_keyout_valLocalVariableTypeTablesLorg/apache/hadoop/mapred/OutputCollector; Exceptions Signature(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vt(Ljava/lang/Object;Ljava/lang/Object;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Mapper; SourceFileSaxpyTextoutput.java 34 org/apache/hadoop/io/IntWritable ,- ./ 0/  12map.input.file  java/lang/StringBuilder,SaxpyTextoutput.MapStage1: map.input.file =  , isYpath=  , isXpath=, a=    3#org/apache/hadoop/io/DoubleWritable 3 !org/apache/hadoop/io/LongWritableorg/apache/hadoop/io/Text FG!pegasus/SaxpyTextoutput$MapStage1&org/apache/hadoop/mapred/MapReduceBaseorg/apache/hadoop/mapred/Mapperjava/lang/String(org/apache/hadoop/mapred/OutputCollector!org/apache/hadoop/mapred/Reporterjava/io/IOException org/apache/hadoop/mapred/JobConfget&(Ljava/lang/String;)Ljava/lang/String;java/lang/Double parseDouble(Ljava/lang/String;)Dcontains(Ljava/lang/CharSequence;)Zjava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(Z)Ljava/lang/StringBuilder;(D)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/io/PrintStreamprintln(Ljava/lang/String;)VindexOf(Ljava/lang/String;)I substring(II)Ljava/lang/String;java/lang/IntegerparseIntcharAt(I)C(I)Ljava/lang/String;(I)V(D)Vcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/SaxpyTextoutput!)*+,-./0/12345P**Y**6)+,-7 8;<=5 +M+ N*+  + :, *-*Y*** 6* 1235#6,748=9B;<748;>?z@AsBA#^CAD4EEE FG5r ,:669` v`! 9`! 
[binary class data continues: Saxpy$MapStage1: mapper with fields from_node_int, isYpath, isXpath, a; reads "map.input.file" to decide whether a record comes from the y-path or the x-path and emits (IntWritable, DoubleWritable) pairs]
PEGASUS/classes/pegasus/ConCmptBlock$RedStage1.class [binary Java class: reducer; fields block_width, recursive_diagmult; parses vector and block records and combines them through GIMV.minBlockVector / GIMV.compareVectors]
PEGASUS/classes/pegasus/PagerankBlock$RedStage1.class [binary Java class: reducer; field block_width; multiplies each matrix block with its vector block via GIMV.multBlockVector and emits the partial rank sums]
PEGASUS/classes/pegasus/PagerankBlock$MapStage2.class [binary Java class: mapper; re-keys every line on its first column as an IntWritable]
PEGASUS/classes/pegasus/ConCmptBlock$MapStage4.class [binary Java class: mapper; field block_width; expands (block_id, in-block row) records back to global node ids (block_id * block_width + row) and emits (node, component_id)]
PEGASUS/classes/pegasus/SaxpyBlock$RedStage1.class [binary Java class: reducer; field block_width; decodes up to two block vectors with MatvecUtils.decodeBlockVector, adds them element-wise, and re-encodes the sum]
PEGASUS/classes/pegasus/PagerankNaive$PrCounters.class [binary Java class: enum holding the single Hadoop counter CONVERGE_CHECK]
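The PagerankBlock$RedStage1 entry above reduces each stripe of the rank vector by multiplying matrix tiles with vector tiles. The stand-alone sketch below shows the arithmetic that GIMV.multBlockVector performs; the BlockEntry triple and the dense double[] tile are illustrative assumptions (the shipped class operates on parsed Hadoop text records), so treat this as a reading aid rather than the library's API.

import java.util.Arrays;
import java.util.List;

public class BlockMultiplySketch {
    // One nonzero inside a block_width x block_width tile (assumed layout).
    static final class BlockEntry {
        final int row, col; final double val;
        BlockEntry(int r, int c, double v) { row = r; col = c; val = v; }
    }

    // out[row] += val * vec[col] for every nonzero in the tile.
    static double[] multBlockVector(List<BlockEntry> tile, double[] vec, int blockWidth) {
        double[] out = new double[blockWidth];
        for (BlockEntry e : tile) out[e.row] += e.val * vec[e.col];
        return out;
    }

    public static void main(String[] args) {
        List<BlockEntry> tile =
            List.of(new BlockEntry(0, 1, 0.5), new BlockEntry(1, 0, 1.0));
        double[] vec = {0.2, 0.8};
        System.out.println(Arrays.toString(multBlockVector(tile, vec, 2))); // [0.4, 0.2]
    }
}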
PEGASUS/classes/pegasus/PagerankNaive$RedStage2.class [binary Java class: reducer; fields number_nodes, mixing_c, random_coeff, converge_threshold; merges a node's previous rank ('s'-prefixed) with its new rank, emits the new value, and increments PrCounters.CONVERGE_CHECK when the absolute change exceeds converge_threshold]
PEGASUS/classes/pegasus/Hadi.class [binary Java class: HADI driver for radii/diameter. Usage string: "hadi <# of vertices> <# of replication> <# of reducers> <'max' or maximum_iteration>". Generates the initial bitstrings (bitmask_cmd.hadi.), then per hop runs stages MapStage1-5 / RedStage1-3,5 with CombinerStage2, reads Nh(h) and the converged/changed counts, and finally reports the maximum, average, and 90% effective diameter; radii land in HDFS hadi_radius and the summary in hadi_radius_summary]
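PagerankNaive$RedStage2 above decides convergence per node: it emits the new rank and bumps PrCounters.CONVERGE_CHECK whenever the rank moved by more than converge_threshold. A minimal in-memory sketch of that test follows; the arrays and return value are stand-ins for the Hadoop records and the Reporter counter.

public class ConvergeCheckSketch {
    // Returns how many nodes moved by more than the threshold; the real
    // reducer increments the Hadoop counter PrCounters.CONVERGE_CHECK instead.
    static int notConverged(double[] prev, double[] next, double threshold) {
        int moving = 0;
        for (int i = 0; i < prev.length; i++)
            if (Math.abs(next[i] - prev[i]) > threshold) moving++;
        return moving; // 0 means the PageRank iteration may stop
    }

    public static void main(String[] args) {
        double[] prev = {0.25, 0.25, 0.50};
        double[] next = {0.26, 0.24, 0.50};
        System.out.println(notConverged(prev, next, 0.001)); // 2
    }
}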
[binary class data continues: tail of the Hadi driver's job-configuration code]
PEGASUS/classes/pegasus/HadiBlock$RedStage2.class [binary Java class: reducer; fields nreplication, encode_bitmask, cur_radius, block_width; merges per-block HADI bitstrings through GIMV.parseHADIBitString / updateHADIBitString / makeHADIBitString]
PEGASUS/classes/pegasus/RWRNaive.class [binary Java class: Random Walk with Restart driver. Usage string: "RWRNaive <# of nodes> <# of reducers> ...". Normalizes the query vector, then iterates stages RWR_Stage1..4 chained with Saxpy/SaxpyTextoutput and an L1norm difference check until the difference falls below the threshold or the iteration cap is hit; reports the min/max scores (rwr_minmax) and a 1000-bin score histogram (rwr_distr), leaving the scores in rwr_vector]
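The RWRNaive driver above chains a matrix-vector job with a Saxpy job each round, which amounts to the usual Random-Walk-with-Restart update x <- c*M*x + (1-c)*q followed by an L1-difference test. Below is a dense, single-machine sketch of that round; the dense arrays and the step() helper are assumptions for illustration, since the real computation runs over HDFS vectors.

public class RwrSketch {
    // One RWR round: x' = c * M * x + (1 - c) * q, with column-normalized M.
    static double[] step(double[][] M, double[] x, double[] q, double c) {
        int n = x.length;
        double[] nx = new double[n];
        for (int i = 0; i < n; i++)           // matrix-vector stage
            for (int j = 0; j < n; j++) nx[i] += M[i][j] * x[j];
        for (int i = 0; i < n; i++)           // Saxpy stage with the query vector
            nx[i] = c * nx[i] + (1 - c) * q[i];
        return nx;
    }

    public static void main(String[] args) {
        double[][] M = {{0, 1}, {1, 0}};      // two nodes pointing at each other
        double[] q = {1, 0};                  // restart at node 0
        double[] x = {0.5, 0.5};
        for (int iter = 0; iter < 50; iter++) x = step(M, x, q, 0.85);
        System.out.printf("%.4f %.4f%n", x[0], x[1]); // ~0.5405 0.4595
    }
}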
[binary class data continues: tail of the RWRNaive driver's job-configuration code]
PEGASUS/classes/pegasus/RWRNaive$RedStage3.class [binary Java class: reducer; fields number_nodes, mixing_c, random_coeff; scans the incoming scores for the minimum and maximum RWR value and emits them as DoubleWritable]
PEGASUS/classes/pegasus/SaxpyBlock$MapStage1.class [binary Java class: mapper; fields from_node_int, isYpath, isXpath, a, block_width; tags block-vector records by input path and pre-multiplies x-path blocks by a using MatvecUtils.decodeBlockVector / encodeBlockVector]
PEGASUS/classes/pegasus/Hadi$RedStage3.class [binary Java class: reducer; sums the per-node neighborhood estimates and the converged/changed counts into one summary record]
PEGASUS/classes/pegasus/Hadi$RedStage2.class [binary Java class: reducer; fields nreplication, encode_bitmask, cur_radius; bitwise-ORs the FM bitmasks arriving at each node (BitShuffleCoder-encoded or hex), detects whether the node's own bitmask changed, and extends the radius history via HadiUtils.update_radhistory]
PEGASUS/classes/pegasus/DegDist$MapPass2.class [binary Java class: mapper for the second degree-distribution pass; re-keys each (node, degree) record on the degree and emits a count of 1]
PEGASUS/classes/pegasus/PagerankInitVector$RedStage1.class [binary Java class: reducer; fields number_nodes, initial_weight, str_weight; expands (start_node, end_node) ranges into per-node initial PageRank weights of 1/number_nodes]
PEGASUS/classes/pegasus/PagerankNaive$MapStage1.class [binary Java class: mapper; field make_symmetric; skips '#' comment lines, splits each edge line into (src, dst), emits the pair and, when make_symmetric is set, the reverse edge as well]
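DegDist$MapPass2 above is the classic histogram pattern: pass 1 computes (node, degree) pairs, pass 2 re-keys on the degree and sums 1s (its RedPass2 partner appears earlier in this archive). In the sketch below an in-memory HashMap stands in for the MapReduce shuffle; the data is made up for illustration.

import java.util.HashMap;
import java.util.Map;

public class DegDistSketch {
    public static void main(String[] args) {
        int[] degrees = {1, 3, 1, 2, 3, 3};   // pass-1 output: one degree per node
        Map<Integer, Integer> hist = new HashMap<>();
        for (int d : degrees) hist.merge(d, 1, Integer::sum); // map (d, 1) + reduce sum
        hist.forEach((deg, cnt) -> System.out.println(deg + "\t" + cnt));
        // degree 1 -> 2 nodes, degree 2 -> 1 node, degree 3 -> 3 nodes
    }
}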
PEGASUS/classes/pegasus/PegasusUtils$MapSwapDouble.class [binary Java class: mapper; splits a tab-separated (id, value) line and re-emits it with key and value swapped, as (IntWritable, DoubleWritable)]
PEGASUS/classes/pegasus/HadiBlock.class [binary Java class: block-based HADI driver. Usage string: "hadiblock <# of nodes> <# of replication> <# of reducers> ...". Same hop loop as Hadi but on block-encoded bitstrings (block_width), running stages HADIBlk_Stage1-5 and reusing Hadi$MapStage5 / Hadi$RedStage5 for the radius summary]
PEGASUS/classes/pegasus/ConCmptBlock$MapStage3.class [binary Java class: mapper; strips the state prefix from each component record and re-keys it on the changed/unchanged flag character]
PEGASUS/classes/pegasus/Hadi$MapStage3.class [binary Java class: mapper; fields nreplication, encode_bitmask; decodes each node's bitmasks, averages FMBitmask.find_least_zero_pos over the replications to estimate the neighborhood size, and emits the estimate together with converged/changed counts under a single key]
PEGASUS/classes/pegasus/ConCmptBlock$RedStage3.class [binary Java class: reducer; sums the integer counts per key, yielding the number of changed vs. unchanged nodes]
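Hadi$MapStage3 above turns each node's bitmasks into a neighborhood-size estimate via FMBitmask.find_least_zero_pos. The sketch below reconstructs the standard Flajolet-Martin estimate that those symbol names point to, including the usual 0.77351 correction constant; the exact per-replication bookkeeping in PEGASUS may differ, so this is an approximation of the idea rather than the shipped code.

public class FmSketch {
    // Position of the lowest zero bit in the mask (FM's "rho" function).
    static int leastZeroPos(long mask) {
        int i = 0;
        while (i < 64 && ((mask >> i) & 1L) == 1L) i++;
        return i;
    }

    // Average the bit positions over the replicated masks, then apply the
    // standard Flajolet-Martin correction to estimate the set size.
    static double estimate(long[] masks) {
        double sum = 0;
        for (long m : masks) sum += leastZeroPos(m);
        return Math.pow(2, sum / masks.length) / 0.77351;
    }

    public static void main(String[] args) {
        long[] masks = {0b0111L, 0b1011L, 0b0011L}; // nreplication = 3
        System.out.printf("%.2f%n", estimate(masks)); // ~6.52
    }
}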
@ AB CEFG()VCodeLineNumberTableLocalVariableTablethis RedStage3 InnerClasses Lpegasus/ConCmptBlock$RedStage3;reduce(Lorg/apache/hadoop/io/Text;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)VlineLjava/lang/String; cur_valueIkeyLorg/apache/hadoop/io/Text;valuesLjava/util/Iterator;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;sumLocalVariableTypeTable1Ljava/util/Iterator;bLorg/apache/hadoop/mapred/OutputCollector; StackMapTable ExceptionsH Signature(Lorg/apache/hadoop/io/Text;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFileConCmptBlock.java I JK LMorg/apache/hadoop/io/Text NOP QR NS TU VW Xpegasus/ConCmptBlock$RedStage3&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/io/IOExceptionjava/util/IteratorhasNext()Znext()Ljava/lang/Object;toString()Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)I(I)Ljava/lang/String;(Ljava/lang/String;)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/ConCmptBlock!  /* ?6,",:6`6-+Y " !(+>R!??? ?!"?#$<%&?'?!()'*+,-A._ *+,- 4  /0 1  2" 3$*+,456  D PEGASUS/classes/pegasus/GIMV.class0000644000000000000000000004063511443145620015570 0ustar rootroot2 I           2 2             !! " ! # $%& '(`) + +* $+ +, '- !. 2/ !0 12 +3 +45 9678 <9 :;< =>?@ =A HB CDEFGH()VCodeLineNumberTableLocalVariableTablethisLpegasus/GIMV;parseVectorVal:(Ljava/lang/String;Ljava/lang/Class;)Ljava/util/ArrayList;valIDrowSstrValLjava/lang/String;typeLjava/lang/Class;arrLjava/util/ArrayList;tokens[Ljava/lang/String;iLocalVariableTypeTableLjava/lang/Class; StackMapTable_ Signaturep(Ljava/lang/String;Ljava/lang/Class;)Ljava/util/ArrayList;>;parseHADIVector)(Ljava/lang/String;)Ljava/util/ArrayList; bitstringS(Ljava/lang/String;)Ljava/util/ArrayList;>;multDiagVector<(Ljava/lang/String;Ljava/lang/String;I)Ljava/util/ArrayList;strDiagstrVec block_widthdVal1[DdVal2tokens1tokens2rf(Ljava/lang/String;Ljava/lang/String;I)Ljava/util/ArrayList;>;IsCompleteHADIVector(Ljava/util/ArrayList;)Zcur_veLpegasus/VectorElem; vector_iterLjava/util/Iterator;(Lpegasus/VectorElem;?Ljava/util/ArrayList;>;>Ljava/util/Iterator;>;IB(Ljava/util/ArrayList;>;)ZminBlockVectorC(Ljava/util/ArrayList;Ljava/util/ArrayList;II)Ljava/util/ArrayList;v_elemvector_val_int vector_valLjava/lang/Integer;b_elemLpegasus/BlockElem;blockvector isFastMethodout_vals[I block_iter vector_mapLjava/util/Map; saved_b_elem result_vector)Lpegasus/VectorElem;?Ljava/util/Iterator;>;(Lpegasus/BlockElem;?Ljava/util/ArrayList;>;@Ljava/util/ArrayList;>;>Ljava/util/Iterator;>;5Ljava/util/Map;J (Ljava/util/ArrayList;>;Ljava/util/ArrayList;>;II)Ljava/util/ArrayList;>;bworBlockVectorD(Ljava/util/ArrayList;Ljava/util/ArrayList;III)Ljava/util/ArrayList;j startpos_bmcur_maskJ vector_strout_str nreplicationencode_bitmask[[J nonzero_count4Ljava/util/Map;(Ljava/util/ArrayList;>;Ljava/util/ArrayList;>;III)Ljava/util/ArrayList;>;multBlockVectorB(Ljava/util/ArrayList;Ljava/util/ArrayList;I)Ljava/util/ArrayList; i_block_width'Lpegasus/BlockElem;(Lpegasus/VectorElem;>Ljava/util/ArrayList;>;?Ljava/util/ArrayList;>;>Ljava/util/Iterator;>;=Ljava/util/Iterator;>; 
PEGASUS/classes/pegasus/PagerankNaive$RedStage1.class  [compiled class file; binary body omitted. Reducer with fields number_nodes, mixing_c, and random_coeff (apparently (1 - mixing_c) / number_nodes; configure() logs "RedStage1: number_nodes = ..., mixing_c = ..., random_coeff = ..."). It gathers a node's current rank and its destination list, then redistributes the rank along the out-edges.]
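The fields and locals recorded above (number_nodes, mixing_c, random_coeff, cur_rank, dst_nodes_list, outdeg) match the standard "scatter rank along out-edges" reduce step. A hedged sketch against the old org.apache.hadoop.mapred API; the class name, the 'v'/'s' value prefixes, and the default-rank fallback are assumptions, not the decompiled code.

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

public class PagerankRedStage1Sketch extends MapReduceBase
        implements Reducer<IntWritable, Text, IntWritable, Text> {
    private int numberNodes = 1;

    @Override
    public void configure(JobConf job) {
        numberNodes = Integer.parseInt(job.get("number_nodes"));
    }

    public void reduce(IntWritable key, Iterator<Text> values,
                       OutputCollector<IntWritable, Text> output, Reporter reporter)
            throws IOException {
        ArrayList<Integer> dstNodes = new ArrayList<Integer>();
        double curRank = 1.0 / numberNodes;        // fallback if no rank record arrives
        while (values.hasNext()) {
            String line = values.next().toString();
            if (line.charAt(0) == 'v')             // assumed "v<rank>" rank record
                curRank = Double.parseDouble(line.substring(1));
            else                                   // otherwise an out-edge destination id
                dstNodes.add(Integer.parseInt(line));
        }
        int outdeg = dstNodes.size();
        for (int i = 0; i < outdeg; i++)           // scatter rank/outdeg to each neighbor
            output.collect(new IntWritable(dstNodes.get(i)),
                           new Text("s" + (curRank / outdeg)));
    }
}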
PEGASUS/classes/pegasus/PegasusUtils$MapIdentityLongText.class  [compiled class file; binary body omitted. Identity mapper: splits each "key<TAB>value" text line at the tab and re-emits it as a (LongWritable, Text) pair.]
PEGASUS/classes/pegasus/SaxpyBlock.class  [compiled class file; binary body omitted. A Configured/Tool with main, printUsage, run, and configSaxpy and fields nreducers and block_width. The usage string starts "SaxpyBlock <# of reducers>" (the remaining arguments are garbled in the pool). It computes the block-vector saxpy y <- y + a*x (job name "Lanczos.SaxpyBlock") and renames the output to saxpy_output1 when the output path would collide with an input path.]
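From SaxpyBlock's pool one can read off roughly how configSaxpy wires the job: the scalar a and the two input path names go into the JobConf so the mapper can tell the y input from the x input at runtime. A sketch under those assumptions; the Sketch class name is invented, and the real mapper/reducer classes are only referenced in comments.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;

public class SaxpyConfSketch {
    // Builds a job computing y <- y + a*x over vectors stored in HDFS.
    public static JobConf configSaxpy(Path py, Path px, Path out, double a, int nreducers) {
        JobConf conf = new JobConf(SaxpyConfSketch.class);
        conf.set("y_path", py.getName());   // lets the mapper tell y from x by input file
        conf.set("x_path", px.getName());
        conf.set("a", "" + a);              // scalar multiplier, read back in configure()
        conf.setJobName("Lanczos.SaxpyBlock");
        // conf.setMapperClass(SaxpyBlock.MapStage1.class);  // real classes in the jar
        // conf.setReducerClass(SaxpyBlock.RedStage1.class);
        FileInputFormat.setInputPaths(conf, py, px);
        FileOutputFormat.setOutputPath(conf, out);
        conf.setNumReduceTasks(nreducers);
        conf.setOutputKeyClass(IntWritable.class);
        conf.setOutputValueClass(Text.class);
        return conf;
    }
}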
PEGASUS/classes/pegasus/ConCmptBlock$RedStage5.class  [compiled class file; binary body omitted. Reducer<IntWritable, IntWritable, IntWritable, IntWritable> that sums the partial counts arriving for one component id, producing the component-size summary.]
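RedStage5 looks like a plain summing reducer over component-size counts. A minimal sketch, assuming IntWritable partial counts keyed by component id:

import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

public class ComponentSizeSumSketch extends MapReduceBase
        implements Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {
    public void reduce(IntWritable key, Iterator<IntWritable> values,
                       OutputCollector<IntWritable, IntWritable> output, Reporter reporter)
            throws IOException {
        int count = 0;
        while (values.hasNext())
            count += values.next().get();             // accumulate partial counts
        output.collect(key, new IntWritable(count));  // (component_id, #nodes)
    }
}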
PEGASUS/classes/pegasus/HadiBlock$MapStage4.class  [compiled class file; binary body omitted. Mapper with a block_width field; its locals (cur_hop, cur_nh, max_nh, ninety_th, radius_str, eff_radius, a DecimalFormat "#.##") indicate it parses each node's per-hop neighborhood history and extracts the effective radius.]
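MapStage4's locals (max_nh, ninety_th, radius_str, DecimalFormat "#.##") match HADI's definition of effective radius: the first hop at which a node's neighborhood reaches 90% of its final size. A simplified sketch; the input layout and the absence of interpolation are assumptions.

import java.text.DecimalFormat;

public class EffRadiusSketch {
    // radiusInfo holds "hop:neighborhoodSize" pairs, e.g. {"0:1","1:4","2:9","3:10"}.
    static String effectiveRadius(String[] radiusInfo) {
        double maxNh = Double.parseDouble(radiusInfo[radiusInfo.length - 1].split(":")[1]);
        double ninetyTh = maxNh * 0.9;                 // the 90% threshold
        DecimalFormat df = new DecimalFormat("#.##");  // same pattern as in the pool
        for (String pair : radiusInfo) {
            String[] t = pair.split(":");
            if (Double.parseDouble(t[1]) >= ninetyTh)
                return df.format(Double.parseDouble(t[0]));
        }
        return df.format(0);  // unreachable: the last pair always meets the threshold
    }

    public static void main(String[] args) {
        // prints 2: the 2-hop neighborhood (9) already covers 90% of the final 10
        System.out.println(effectiveRadius(new String[]{"0:1", "1:4", "2:9", "3:10"}));
    }
}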
>@ ABC DE >F GH I JKLM NPQR()VCodeLineNumberTableLocalVariableTablethisMapIdentityDouble InnerClasses(Lpegasus/PegasusUtils$MapIdentityDouble;map(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vkey#Lorg/apache/hadoop/io/LongWritable;valueLorg/apache/hadoop/io/Text;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter; line_textLjava/lang/String;tabposIout_keyLocalVariableTypeTablesLorg/apache/hadoop/mapred/OutputCollector; ExceptionsS Signature(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vt(Ljava/lang/Object;Ljava/lang/Object;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Mapper; SourceFilePegasusUtils.java  TU V WX YZ[ \X org/apache/hadoop/io/IntWritable ]#org/apache/hadoop/io/DoubleWritable Y^_ `a bc de!org/apache/hadoop/io/LongWritableorg/apache/hadoop/io/Text f&pegasus/PegasusUtils$MapIdentityDouble&org/apache/hadoop/mapred/MapReduceBaseorg/apache/hadoop/mapred/Mapperjava/io/IOExceptiontoString()Ljava/lang/String;java/lang/StringindexOf(Ljava/lang/String;)I substring(II)Ljava/lang/String;java/lang/IntegerparseInt(I)V(I)Ljava/lang/String;java/lang/Double parseDouble(Ljava/lang/String;)D(D)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/PegasusUtils!/* ?,:66-Y Y` >R?? ?!"?#$?%&9'(0)*#+*, ?#-./01A2b*+,-434546$7&./089: O PEGASUS/classes/pegasus/PagerankNaive$MapStage2.class0000644000000000000000000000445511443145620021311 0ustar rootroot2V 4 56 789 :; <= > ?@A BDEF()VCodeLineNumberTableLocalVariableTablethis MapStage2 InnerClasses!Lpegasus/PagerankNaive$MapStage2;map(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vkey#Lorg/apache/hadoop/io/LongWritable;valueLorg/apache/hadoop/io/Text;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;line[Ljava/lang/String;LocalVariableTypeTableiLorg/apache/hadoop/mapred/OutputCollector; ExceptionsG Signature(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vt(Ljava/lang/Object;Ljava/lang/Object;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Mapper; SourceFilePagerankNaive.java  HI J KL org/apache/hadoop/io/IntWritableM NO Porg/apache/hadoop/io/Text QR ST!org/apache/hadoop/io/LongWritable Upegasus/PagerankNaive$MapStage2&org/apache/hadoop/mapred/MapReduceBaseorg/apache/hadoop/mapred/Mapperjava/io/IOExceptiontoString()Ljava/lang/String;java/lang/Stringsplit'(Ljava/lang/String;)[Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)I(I)V(Ljava/lang/String;)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/PagerankNaive! /* +,:-Y2Y2  *>++++ +!" #$% +&'()*A+b*+ ,- 4,-.-/ 0"'()123  C PEGASUS/classes/pegasus/ConCmptBlock.class0000644000000000000000000001762311443145617017353 0ustar rootroot2g i                     " " " "              I I I I  I    I I I I ! 
"#$ RedStage5 InnerClasses MapStage5 MapStage4 RedStage3 MapStage3%CombinerStage2 RedStage2 MapStage2 RedStage1 MapStage1MAX_ITERATIONSI changed_nodes[Iunchanged_nodes iter_counter edge_pathLorg/apache/hadoop/fs/Path; vector_path curbm_path tempbm_path nextbm_path output_pathcurbm_unfold_pathsummaryout_pathlocal_output_pathLjava/lang/String; number_nodes nreducers cur_radius block_widthrecursive_diagmult()VCodeLineNumberTableLocalVariableTablethisLpegasus/ConCmptBlock;main([Ljava/lang/String;)Vargs[Ljava/lang/String;result Exceptions& printUsage()Irun([Ljava/lang/String;)Ifs!Lorg/apache/hadoop/fs/FileSystem;new_pathriLpegasus/ResultInfo;i StackMapTable'() configStage1$()Lorg/apache/hadoop/mapred/JobConf;conf"Lorg/apache/hadoop/mapred/JobConf; configStage2 configStage3 configStage4 configStage5 SourceFileConCmptBlock.java }~ ~ ~ ~ ~ ~ ~ ~ x x x x x$org/apache/hadoop/conf/Configurationpegasus/ConCmptBlock* +, -. /0ConCmptBlock <# of nodes> <# of reducers> 1 23 45 org/apache/hadoop/fs/Path 3 concmpt_curbmconcmpt_summaryout6 78fast( 98= -----===[PEGASUS: A Peta-Scale Graph Mining System]===----- java/lang/StringBuilderG[PEGASUS] Computing connected component using block method. Reducers = :; :<, block_width = =>_temp wx |x ? @A BC' DEF GH IJ/ KLM NO yz) Px QxHop  : changed = , unchanged = -All the component ids converged. Finishing... RS TU0Unfolding the block structure for easy lookup... /Summarizing connected components information... ( [PEGASUS] Connected component computed.[PEGASUS] Total Iteration = r[PEGASUS] Connected component information is saved in the HDFS concmpt_curbm as "node_id 'msf'component_id" format}[PEGASUS] Connected component distribution is saved in the HDFS concmpt_summaryout as "component_id number_of_nodes" format.  org/apache/hadoop/mapred/JobConf V WXConCmptBlock_pass1 Y3pegasus/ConCmptBlock$MapStage1 Z[pegasus/ConCmptBlock$RedStage1 \[] ^_` ab c. 
org/apache/hadoop/io/IntWritable d[org/apache/hadoop/io/Text e[ConCmptBlock_pass2pegasus/ConCmptBlock$MapStage2pegasus/ConCmptBlock$RedStage2ConCmptBlock_pass3pegasus/ConCmptBlock$MapStage3pegasus/ConCmptBlock$RedStage3 f[ConCmptBlock_pass4pegasus/ConCmptBlock$MapStage4ConCmptBlock_pass5pegasus/ConCmptBlock$MapStage5pegasus/ConCmptBlock$RedStage5 {z!org/apache/hadoop/conf/Configuredorg/apache/hadoop/util/Tool#pegasus/ConCmptBlock$CombinerStage2java/lang/Exceptionorg/apache/hadoop/fs/FileSystemjava/lang/Stringpegasus/ResultInfo!org/apache/hadoop/util/ToolRunnerY(Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/util/Tool;[Ljava/lang/String;)Ijava/lang/Systemexit(I)VoutLjava/io/PrintStream;java/io/PrintStreamprintln(Ljava/lang/String;)VprintGenericCommandUsage(Ljava/io/PrintStream;)Vjava/lang/IntegerparseInt(Ljava/lang/String;)I compareToappend-(Ljava/lang/String;)Ljava/lang/StringBuilder;(I)Ljava/lang/StringBuilder;toString()Ljava/lang/String;"org/apache/hadoop/mapred/JobClientrunJobI(Lorg/apache/hadoop/mapred/JobConf;)Lorg/apache/hadoop/mapred/RunningJob;getConf(()Lorg/apache/hadoop/conf/Configuration;getLocalN(Lorg/apache/hadoop/conf/Configuration;)Lorg/apache/hadoop/fs/LocalFileSystem;org/apache/hadoop/fs/FileUtil fullyDelete?(Lorg/apache/hadoop/fs/FileSystem;Lorg/apache/hadoop/fs/Path;)VgetI(Lorg/apache/hadoop/conf/Configuration;)Lorg/apache/hadoop/fs/FileSystem;copyToLocalFile9(Lorg/apache/hadoop/fs/Path;Lorg/apache/hadoop/fs/Path;)Vpegasus/ConCmptreadIterationOutput((Ljava/lang/String;)Lpegasus/ResultInfo;changed unchangeddelete(Lorg/apache/hadoop/fs/Path;)Zrename9(Lorg/apache/hadoop/fs/Path;Lorg/apache/hadoop/fs/Path;)Z:(Lorg/apache/hadoop/conf/Configuration;Ljava/lang/Class;)Vset'(Ljava/lang/String;Ljava/lang/String;)V setJobNamesetMapperClass(Ljava/lang/Class;)VsetReducerClass(org/apache/hadoop/mapred/FileInputFormat setInputPathsA(Lorg/apache/hadoop/mapred/JobConf;[Lorg/apache/hadoop/fs/Path;)V)org/apache/hadoop/mapred/FileOutputFormat setOutputPath@(Lorg/apache/hadoop/mapred/JobConf;Lorg/apache/hadoop/fs/Path;)VsetNumReduceTaskssetOutputKeyClasssetOutputValueClasssetCombinerClass!ij wx yz {z|x}~~~~~~~~xxxxx G********* * * * *@ *:!v wxyz{"|'},16;A G TYY*<x 0+ *Y+2*Y+2*Y+2*Y+2*Y+2*Y*Y *+2 *+2 +2 ***+2 !"Y#$%* &'%* &(*"Y#+2%)%(** =+B*Y ` ,`,*-.W*/.W*0.W*12Y**3*14N"Y#**%5%&(:-*Y67:8,9O8,:O"Y#;%&<%9&=%:&(9F>-*?W-*?W-*?W-**@WA*B.W1-*?W-*?W-*?W-**@W*12Y**3C*D.WE"Y#F%,&(GH6 '5CQ^ku #8@\mt %28MU]e>@\tx X-IY*1JL+K"Y#L%* &(M+N"Y#L%*&(M+OP+QR+ST+Y*SY*SU+*V+* W+XY+Z[+2 +GMT[qynIY*1JL+K"Y#L%* &(M+\P+]R+^T+Y*SU+*V+* W+XY+Z[+.   + 18?NV^eln_VIY*1JL+_P+`R+aT+ab+Y*SU+*V+W+ZY+Z[+.  !#$#%*'9(A*F,M-T/VGdIY*1JL+K"Y#L%* &(M+cP+dR+Y*SU+*V+W+XY+Z[+* 56+7198;G<O>T@[AbCdUuIY*1JL+K"Y#L%* &(M+eP+fR+gT+gb+Y*SU+* V+* W+XY+X[+2 IJ+K1M8N?OFQUR]TeVlWsYuf?++ 8+ h,#$%'lR gk fm dn ao `p qr ^s ]t Su Qv PEGASUS/classes/pegasus/PagerankNaive.class0000644000000000000000000002424211443145620017535 0ustar rootroot2           ?33@             !" #$% #& #'( % %)* %+ ,@$ -./ %01 %234 5 6 78 9 :; < => ? 
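ConCmptBlock's strings spell out its control flow: run the per-hop passes, read back the changed/unchanged counts, and stop at the fixed point. A driver-shaped sketch with all MapReduce plumbing stubbed out; runOneHop and ResultInfo stand in for configStage1..3, JobClient.runJob, and the counter readback, and the MAX_ITERATIONS value is a guess (only the field name is visible).

public class ConCmptDriverSketch {
    static class ResultInfo { int changed; int unchanged; }

    // Stub standing in for the three per-hop MapReduce passes plus the
    // readback of changed/unchanged counts from the stage-3 output file.
    static ResultInfo runOneHop(int hop) { return new ResultInfo(); }

    public static void main(String[] args) {
        final int MAX_ITERATIONS = 1024;              // value assumed, field name visible
        for (int i = 0; i < MAX_ITERATIONS; i++) {
            ResultInfo ri = runOneHop(i);
            System.out.println("Hop " + (i + 1) + " : changed = " + ri.changed
                    + ", unchanged = " + ri.unchanged);
            if (ri.changed == 0) {                    // fixed point reached
                System.out.println("All the component ids converged. Finishing...");
                break;
            }
        }
        // the real driver then unfolds the block vector and summarizes components
    }
}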
PEGASUS/classes/pegasus/PagerankNaive.class  [compiled class file; binary body omitted. The plain PageRank driver (Configured/Tool) with converge_threshold, number_nodes, niteration, mixing_c, make_symmetric, and a PrCounters enum (CONVERGE_CHECK). It seeds pr_vector via gen_initial_vector ("pagerank_init_vector.temp", "Creating initial pagerank vectors..."), runs Pagerank_Stage1..Stage4 until the convergence counter reports no change or the max iteration is hit, then writes min/max ranks to pr_minmax and a 1000-bin histogram to pr_distr.]
PEGASUS/classes/pegasus/PegasusUtils$MapIdentity.class  [compiled class file; binary body omitted. Identity mapper turning "id<TAB>value" lines into (IntWritable, Text) pairs.]
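The strings "pagerank_init_vector.temp" and "Creating initial pagerank vectors..." point at gen_initial_vector writing a uniform 1/N rank vector to a local file before copying it into HDFS. A sketch under that reading; the "v" value prefix is an assumption carried over from the reduce stage.

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class InitialVectorSketch {
    static void genInitialVector(int numberNodes, String fileName) throws IOException {
        BufferedWriter out = new BufferedWriter(new FileWriter(fileName));
        double initialRank = 1.0 / numberNodes;       // uniform starting distribution
        for (int i = 0; i < numberNodes; i++)
            out.write(i + "\tv" + initialRank + "\n");
        out.close();                                  // driver then copies this into HDFS
    }

    public static void main(String[] args) throws IOException {
        genInitialVector(10, "pagerank_init_vector.temp");
    }
}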
PEGASUS/classes/pegasus/BlockElem.class  [compiled class file; binary body omitted. A tiny generic data holder, pegasus.BlockElem<T>, with fields short row, short col, T val; reconstructed below.]
PEGASUS/classes/pegasus/ConCmpt$MapStage2.class  [compiled class file; binary body omitted. Trivial mapper re-keying tab-separated lines as (IntWritable, Text) pairs.]
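BlockElem is small enough that its debug tables pin it down almost completely. The following is a near-verbatim reconstruction; only the spelling of the generic parameter is chosen here.

// Reconstruction of pegasus.BlockElem from the class file above: a
// (row, col, value) triple for one nonzero inside a matrix block. The field
// and parameter names are the ones recorded in the debug tables.
public class BlockElem<T> {
    public short row;
    public short col;
    public T val;

    public BlockElem(short in_row, short in_col, T in_val) {
        row = in_row;
        col = in_col;
        val = in_val;
    }
}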
!"8 **# +-$  %()*"f**+Y * #0 2)3$*%(*+,-."x ,::2v%-Y2Y2Q2626-YY2*-YY2#:789;<!="?/@QEZFcG}IJL$\ ZE/c<0%(123456789:;<= 5>?@A.MBCDEA-F"b*+,-#+$4%(GHIHJ6K8BCDLMN' l& PEGASUS/classes/pegasus/PagerankPrep$RedStage1.class0000644000000000000000000000617711443145620021154 0ustar rootroot2 ST S UV UWX Y Z [ \ ]^ _` a bc S d e fg h Y i jk lnop()VCodeLineNumberTableLocalVariableTablethis RedStage1 InnerClasses Lpegasus/PagerankPrep$RedStage1;reduce(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)V cur_value_strLjava/lang/String;key"Lorg/apache/hadoop/io/IntWritable;valuesLjava/util/Iterator;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;iIdst_nodes_listLjava/util/ArrayList;deg elem_valueDLocalVariableTypeTable1Ljava/util/Iterator;iLorg/apache/hadoop/mapred/OutputCollector;*Ljava/util/ArrayList; StackMapTableTn^qrs Exceptionst Signature(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFilePagerankPrep.java  java/util/ArrayListq uv wxorg/apache/hadoop/io/Text yz {| }~   org/apache/hadoop/io/IntWritable java/lang/Integer java/lang/StringBuilder y  r ()pegasus/PagerankPrep$RedStage1&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/util/Iterator(org/apache/hadoop/mapred/OutputCollector!org/apache/hadoop/mapred/Reporterjava/io/IOExceptionhasNext()Znext()Ljava/lang/Object;toString()Ljava/lang/String;parseInt(Ljava/lang/String;)IvalueOf(I)Ljava/lang/Integer;add(Ljava/lang/Object;)Zsize()Iget(I)Ljava/lang/Object;intValue(I)V(I)Ljava/lang/String;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(D)Ljava/lang/StringBuilder;(Ljava/lang/String;)Vcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/PagerankPrep! !/*"E# $'()! Y:,",: W 6o96H- Y YY+". J LM N.O1Q8R?SITSW#f *+$',-./0123BM45 678W85?P9:; .<0= 6>?) 
PEGASUS/classes/pegasus/JoinTablePegasus$RedPass1.class  [compiled class file; binary body omitted. Join reducer with number_tables, join_type (an "OuterJoin" constant is visible), and a separator; it buffers each table's column value for the key in a HashMap and emits the stitched output row.]
PEGASUS/classes/pegasus/RWRBlock.class  [compiled class file; binary body omitted. The block random-walk-with-restart driver (Configured/Tool). Paths: rwr_query_norm(_block), rwr_vector(_new), rwr_vector_difference, rwr_tempmv_block, rwr_output_block, rwr_minmax, rwr_distr. It normalizes the query vector (pegasus.NormalizeVector), block-encodes it (pegasus.matvec.MatvecPrep), then iterates matvec plus SaxpyBlock with the mixing constant and restart weight, testing convergence via pegasus.L1normBlock ("l1norm difference = ..."); on convergence it unfolds the block vector and reports min/max RWR scores and their histogram.]
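Taken together, the jobs RWRBlock references (query normalization, matvec prep, SaxpyBlock with the mixing constant, L1normBlock for the stopping test) implement the usual random-walk-with-restart update x' = c*(M x) + (1 - c)*q. A sketch of one step in plain arrays rather than chained MapReduce jobs:

public class RwrIterationSketch {
    // One RWR step: next = c * (M x) + (1 - c) * q, where q is the
    // normalized restart (query) vector and c the mixing constant.
    static double[] step(double[][] m, double[] x, double[] q, double c) {
        int n = x.length;
        double[] next = new double[n];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++)               // the matvec job
                next[i] += m[i][j] * x[j];
            next[i] = c * next[i] + (1 - c) * q[i];   // the SaxpyBlock job
        }
        return next;                                  // driver loops until
    }                                                 // L1(next - x) < threshold
}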
+ 1 8ET\ahoqbsY*ItL+u+Y,-.*M0v+w+Y,-.*30v+z+|+~+*J*7W+Y*S+*++++: +!G"M$T%[&b(o*~+-/02!sY*It:+Y,-.'30v+Y,-.)30vz|~*J*7WY*S**:89*:D<K>S?[@cBpDEGIJL*G$J " " " }" {" }    PEGASUS/classes/pegasus/HadiUtils.class0000644000000000000000000000712711443145617016721 0ustar rootroot2 *p qr?s tuvwx y z{ |}~ p   t?fff y    #p # # #()VCodeLineNumberTableLocalVariableTablethisLpegasus/HadiUtils;update_radhistory*([JLjava/lang/String;II)Ljava/lang/String;cur_nhD self_bitmask[J saved_rad_nhLjava/lang/String; cur_radiusI nreplicationmax_nh ninety_maxnhtoken[Ljava/lang/String;iresultbAboveThresholdZcur_hopprev_hopprev_nhdfLjava/text/DecimalFormat; bFirstAdd StackMapTable7@weffective_diameter([FI)FdecimalFN[F max_radius thresholdaverage_diametermin_nhhsumUreadNhoodOutput,(Ljava/lang/String;)Lpegasus/HadiResultInfo;inLjava/io/BufferedReader;e&Ljava/io/UnsupportedEncodingException;Ljava/io/IOException;new_path output_pathstrlineriLpegasus/HadiResultInfo; Exceptions SourceFile Hadi.java +, : java/text/DecimalFormat#.# +  java/lang/StringBuilder  /part-00000java/io/BufferedReaderjava/io/InputStreamReaderjava/io/FileInputStreamUTF8 + + $java/io/UnsupportedEncodingExceptionjava/io/IOException pegasus/HadiResultInfo S ; ;pegasus/HadiUtilsjava/lang/Objectjava/lang/Stringjava/lang/Exceptionpegasus/FMBitmasknh_from_bitmask([JI)Dsplit'(Ljava/lang/String;)[Ljava/lang/String;(Ljava/lang/String;)Vjava/lang/IntegerparseInt(Ljava/lang/String;)Ijava/lang/Double parseDouble(Ljava/lang/String;)Dappend-(Ljava/lang/String;)Ljava/lang/StringBuilder;(I)Ljava/lang/StringBuilder;(D)Ljava/lang/StringBuilder;toString()Ljava/lang/String;format(D)Ljava/lang/String;length()I*(Ljava/io/InputStream;Ljava/lang/String;)V(Ljava/io/Reader;)VreadLinejava/lang/Float parseFloat(Ljava/lang/String;)Fnhconverged_nodes changed_nodes )*+,-/*.H/ 01 23-1*9k9+:: 6 6 6 9Y :66   2 6  `2 9 806  ( Y  :  - Y  : 9 6 n5 - ( Y  : Y :  .jMNOQRS$T'U2V5X@YJZV[[\c]f_k`efhiXlmp.s/Vw451671891:;1<;*=5">5?@8A; B9 CD !E; $ F; ' G52HI5JDKi8LMNMOWLMNMO. LMNMO7 PQ-?*0E$j8>.*0*d0f*0*d0fn8db."y{ }~.6}</>.RS?TU?V;;=S 3A; 5WSK  ) XQ-9*0E*0F 86*0*d0fjb8%$fn8." 
PEGASUS/classes/pegasus/Hadi$MapStage5.class  [compiled class file; binary body omitted. Mapper that pulls each node's effective radius out of its radius history and emits (Math.round(radius), 1) for the radius-distribution pass.]
PEGASUS/classes/pegasus/RWRBlock$MapStage1.class  [compiled class file; binary body omitted. Mapper for RWRBlock's first stage; routes vector lines and block lines into (IntWritable, Text) pairs for the matvec.]
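Hadi$MapStage5's pool (parseDouble, Math.round, IntWritable) reads like a histogram mapper: round each node's effective radius and emit a count of 1 to be summed downstream. A sketch; the input token layout is assumed.

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class RadiusHistogramMapSketch extends MapReduceBase
        implements Mapper<LongWritable, Text, IntWritable, IntWritable> {
    public void map(LongWritable key, Text value,
                    OutputCollector<IntWritable, IntWritable> output, Reporter reporter)
            throws IOException {
        String[] tokens = value.toString().split("\t");
        double effRadius = Double.parseDouble(tokens[1].split(":")[0]); // assumed layout
        output.collect(new IntWritable((int) Math.round(effRadius)), new IntWritable(1));
    }
}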
PEGASUS/classes/pegasus/PagerankNaive$MapStage3.class  [compiled class file; binary body omitted. Mapper that strips the "v" prefix from rank lines and emits the rank as a DoubleWritable for the min/max pass.]
PEGASUS/classes/pegasus/PegasusUtils$RedSumDouble.class  [compiled class file; binary body omitted. Reducer that sums DoubleWritable values per key.]
PEGASUS/classes/pegasus/PegasusUtils$RangePartition.class  [compiled class file; binary body omitted. A Partitioner that reads number_nodes in configure() ("RangePartition configure(): number_nodes = ...") and assigns keys to reducers by contiguous node-id range rather than by hash.]
PEGASUS/classes/pegasus/L1normBlock$MapStage1.class  [compiled class file; binary body omitted. Mapper with block_width; decodes a block vector (MatvecUtils.decodeBlockVector), sums the absolute values of its entries, and emits the partial sum.]
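RangePartition's intent is clear from its name and its configure() logging: contiguous node-id ranges per reducer, so ordered keys land in ordered output files. A sketch; the exact rounding arithmetic is an assumption.

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Partitioner;

public class RangePartitionSketch implements Partitioner<IntWritable, Text> {
    private int numberNodes = 1;

    public void configure(JobConf job) {
        numberNodes = Integer.parseInt(job.get("number_nodes"));
    }

    public int getPartition(IntWritable key, Text value, int numReduceTasks) {
        // key/numberNodes lies in [0, 1); scale it to a reducer index so each
        // reducer receives one contiguous slice of the id space
        return (int) ((long) key.get() * numReduceTasks / numberNodes);
    }
}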
PEGASUS/classes/pegasus/L1norm$MapStage1.class  [compiled class file; binary body omitted. Mapper parsing "id<TAB>value" lines (tolerating a "v" prefix on the value), taking Math.abs of the value, and emitting it for a global sum.]
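This mapper pairs with the RedSumDouble reducer above: every |x_i| goes to one constant key so a single reduce call produces sum|x_i|, the L1 norm used for the convergence test. A sketch; the optional "v" prefix handling is an assumption from the vector format.

import java.io.IOException;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class L1normMapSketch extends MapReduceBase
        implements Mapper<LongWritable, Text, IntWritable, DoubleWritable> {
    public void map(LongWritable key, Text value,
                    OutputCollector<IntWritable, DoubleWritable> output, Reporter reporter)
            throws IOException {
        String line = value.toString();
        String valStr = line.substring(line.indexOf("\t") + 1);
        if (valStr.charAt(0) == 'v')                  // strip vector-element prefix
            valStr = valStr.substring(1);
        output.collect(new IntWritable(0),            // single key: one global sum
                       new DoubleWritable(Math.abs(Double.parseDouble(valStr))));
    }
}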
901234A5b*+,-$467879$:&123;<= R PEGASUS/classes/pegasus/Hadi$RedStage1.class0000644000000000000000000001135411443145617017441 0ustar rootroot2 4z 3{ 3| 3}6 ~ '89  z   z    ' ,   '  ,   3 number_nodesI nreplicationencode_bitmask()VCodeLineNumberTableLocalVariableTablethis RedStage1 InnerClassesLpegasus/Hadi$RedStage1; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;reduce(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)V src_node_intexLjava/lang/Exception; bitmask_newLjava/lang/String; cur_key_int src_nodes_itLjava/util/Iterator;key"Lorg/apache/hadoop/io/IntWritable;valuesoutput*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;bitmask src_nodes_setLjava/util/Set;self_containedZ cur_valuecomplete_prefixCLocalVariableTypeTable1Ljava/util/Iterator;iLorg/apache/hadoop/mapred/OutputCollector;$Ljava/util/Set; StackMapTable Exceptions Signature(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFile Hadi.java :; 67 87 97  java/lang/StringBuilderRedStage1: number_nodes = , nreplication = , encode_bitmask=  java/util/HashSet org/apache/hadoop/io/Textb c  java/lang/Exception)Exception at bitmask.charAt(2). bitmask=[],key= java/lang/Integer bs  org/apache/hadoop/io/IntWritable : : bo,Exception at bitmask.substring(3). bitmask=[ GHpegasus/Hadi$RedStage1&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/util/Iterator(org/apache/hadoop/mapred/OutputCollector!org/apache/hadoop/mapred/Reporterjava/lang/String java/util/Setjava/io/IOException org/apache/hadoop/mapred/JobConfget&(Ljava/lang/String;)Ljava/lang/String;parseInt(Ljava/lang/String;)Ijava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(I)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/io/PrintStreamprintln(Ljava/lang/String;)VhasNext()Znext()Ljava/lang/Object; startsWith(Ljava/lang/String;)Zpegasus/FMBitmaskgenerate_bitmask(III)Ljava/lang/String;valueOf(I)Ljava/lang/Integer;add(Ljava/lang/Object;)Z()IcharAt(I)Citerator()Ljava/util/Iterator;intValue(C)Ljava/lang/StringBuilder; substring(I)Ljava/lang/String;(I)Vcollect'(Ljava/lang/Object;Ljava/lang/Object;)V pegasus/Hadi!345678797:;<J****= > ?BCD<\*+*+*+  Y  ***= '[>\?B\EFGH<~ :Y:6:,c,: :***:6   W+! 6+! 
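None of these payloads survive as readable source, but the constant-pool strings pin down the simplest of them almost exactly. Below is a minimal reconstruction of what PegasusUtils$RedSumDouble would decompile to, using the old org.apache.hadoop.mapred API the pool references; the method signature and the hasNext/next/get/collect calls are verified against the pool, while variable names and formatting beyond those are guesses.

import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

// Declared as a static inner class of PegasusUtils in the original.
// Sums the DoubleWritable values of each key and emits one (key, sum) pair.
class RedSumDouble extends MapReduceBase
    implements Reducer<IntWritable, DoubleWritable, IntWritable, DoubleWritable> {
  public void reduce(IntWritable key, Iterator<DoubleWritable> values,
                     OutputCollector<IntWritable, DoubleWritable> output,
                     Reporter reporter) throws IOException {
    double sum = 0;
    while (values.hasNext())
      sum += values.next().get();          // accumulate partial sums for this key
    output.collect(key, new DoubleWritable(sum));
  }
}

Because it is associative, the same class doubles as the combiner; the L1normBlock driver listed below wires it in as both combiner and reducer.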
PEGASUS/classes/pegasus/HadiBlock$MapStage2.class
    [compiled bytecode omitted; a pass-through mapper: splits each line on tab and emits (IntWritable(line[0]), Text(line[1]))]
PEGASUS/classes/pegasus/ConCmptBlock$MapStage2.class
    [compiled bytecode omitted; the same tab-split pass-through mapper, for the block connected-components job]
PEGASUS/classes/pegasus/PegasusUtils$RedIdentityGen.class
    [compiled bytecode omitted; a generic identity Reducer<K1,V1,K1,V1> that re-emits every value unchanged]
PEGASUS/classes/pegasus/ConCmpt$MapStage1.class
    [compiled bytecode omitted; fields from_node_int/to_node_int and make_symmetric; configure() reads "make_symmetric" ("MapStage1 : make_symmetric = ..."); map() skips '#' comment lines, splits records on tab, and emits the reverse edge as well when make_symmetric is set]
PEGASUS/classes/pegasus/HadiBlock$RedStage1.class
    [compiled bytecode omitted; configure() reads "nreplication", "encode_bitmask", "block_width" ("RedStage1: nreplication = ..., encode_bitmask=..., block_width=..."); reduce() parses vector and matrix blocks (GIMV.parseHADIVector / GIMV.parseBlockVal) and combines them with GIMV.bworBlockVector, the block-wise bitwise-OR step of HADI]
PEGASUS/classes/pegasus/L1normBlock.class
    [driver bytecode omitted; a Tool with main()/run()/printUsage (usage mentions "<# of reducers>") and fields nreducers/block_width; configL1norm() builds a job around MapStage1 with PegasusUtils.RedSumDouble as both combiner and reducer, IntWritable/DoubleWritable output, printing "[PEGASUS] L1norm computed. Output is saved in HDFS ..." on completion]
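The ConCmpt$MapStage1 pool is detailed enough to sketch. A hedged reconstruction follows, assuming the standard old-API Mapper shape; the 'm' prefix convention for component-vector records and the exact emit directions are inferred from the pool's string constants, not verified.

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

// Inner class of ConCmpt in the original. Routes both edge records and
// component-vector records to the reducers of the GIM-V multiply.
class MapStage1 extends MapReduceBase
    implements Mapper<LongWritable, Text, IntWritable, Text> {
  private final IntWritable from_node_int = new IntWritable();
  private final IntWritable to_node_int = new IntWritable();
  int make_symmetric = 0;

  public void configure(JobConf job) {
    make_symmetric = Integer.parseInt(job.get("make_symmetric"));
    System.out.println("MapStage1 : make_symmetric = " + make_symmetric);
  }

  public void map(LongWritable key, Text value,
                  OutputCollector<IntWritable, Text> output, Reporter reporter)
      throws IOException {
    String line_text = value.toString();
    if (line_text.startsWith("#")) return;          // skip comment lines
    String[] line = line_text.split("\t");
    if (line[1].startsWith("m")) {                  // component-vector record (inferred)
      from_node_int.set(Integer.parseInt(line[0]));
      output.collect(from_node_int, new Text(line[1]));
    } else {                                        // edge record "src<TAB>dst"
      from_node_int.set(Integer.parseInt(line[0]));
      to_node_int.set(Integer.parseInt(line[1]));
      output.collect(to_node_int, new Text(line[0]));
      if (make_symmetric == 1)                      // also send the reverse edge
        output.collect(from_node_int, new Text(line[1]));
    }
  }
}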
PEGASUS/classes/pegasus/Saxpy.class
    [driver bytecode omitted; run() parses y_path, x_path and the scalar a ("[PEGASUS] Computing Saxpy. y_path=..., x_path=..., a=..."), renames the output to "saxpy_output1" when it would collide with an input path, and configSaxpy(Path, Path, Path, double) builds the "Lanczos_pass_saxpy" job around MapStage1/RedStage1 with IntWritable/DoubleWritable output]
PEGASUS/classes/pegasus/Hadi$CombinerStage2.class
    [compiled bytecode omitted; configure() reads "nreplication" and "encode_bitmask" ("CombinerStage2: nreplication = ..., encode_bitmask=..."); reduce() ORs together the partial Flajolet-Martin bitmasks of a key (via BitShuffleCoder.decode_bitmasks / encode_bitmasks when encoding is on) to shrink shuffle volume before RedStage2]
PEGASUS/classes/pegasus/FMBitmask.class
    [compiled bytecode omitted; static helpers generate_bitmask(int,int,int), create_random_bm(int,int), nh_from_bitmask(long[],int) and find_least_zero_pos(long) implementing Flajolet-Martin neighborhood estimation, with the "bsi" record prefix and hex encoding via Integer.toHexString / BitShuffleCoder; see the reconstruction below]
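The FMBitmask helpers are the probabilistic core of HADI, so a sketch is worth spelling out. Method names and signatures below come from the class file; the bodies follow the textbook Flajolet-Martin construction, and the 0.77351 correction constant is the standard FM value, assumed here rather than read from the bytecode. generate_bitmask (which hex-encodes K masks into a "bsi..." string) is omitted.

// Hedged reconstruction of pegasus.FMBitmask (Flajolet-Martin counting).
public class FMBitmask {
  // Draw one FM bitmask: bit j is set with probability 2^-(j+1).
  public static int create_random_bm(int number_node, int K) {
    int size_bitmask = (int) Math.ceil(Math.log(number_node) / Math.log(2));
    int small_bitmask = 0;
    double cur_random = Math.random();
    double threshold = 0.5;
    for (int j = 0; j < size_bitmask; j++) {
      if (cur_random < threshold) {
        small_bitmask = 1 << j;
        break;
      }
      threshold += Math.pow(0.5, j + 2);  // 1/2 + 1/4 + 1/8 + ...
    }
    return small_bitmask;                 // stays 0 in the unlikely tail case
  }

  // Position of the least-significant zero bit of `number`.
  public static int find_least_zero_pos(long number) {
    int pos = 0;
    while (pos < 64 && (number & (1L << pos)) != 0) pos++;
    return pos;
  }

  // Neighborhood-size estimate from K independent bitmasks:
  // 2^(average least-zero position) / 0.77351.
  public static double nh_from_bitmask(long[] bitmask, int K) {
    double avg_bitpos = 0;
    for (int i = 0; i < K; i++) avg_bitpos += find_least_zero_pos(bitmask[i]);
    avg_bitpos /= K;
    return Math.pow(2.0, avg_bitpos) / 0.77351;
  }
}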
PEGASUS/classes/pegasus/JoinTablePegasus.class
    [driver bytecode omitted; a Tool with MapPass1/RedPass1 inner classes and fields output_path, input_paths (ArrayList<Path>), nreducer, number_tables, join_type; supports outer and semi joins ("Join type = OuterJoin" / "Join type = SemiJoin"), logs each "input path : ...", and prints "Joined table is in HDFS ..." when done]
PEGASUS/classes/pegasus/ScalarMult.class
    [driver bytecode omitted; multiplies a vector by a scalar s ("[PEGASUS] Computing ScalarMult. In_path=..., s=..."); configScalarMult(Path, Path, double) sets "s" in the JobConf and runs the "ScalarMult" job with the MapStage1Text mapper (a MapStage1Double variant also exists); no reducer class appears in the pool]
PEGASUS/classes/pegasus/PegasusUtils$MapHistogramText.class
    [compiled bytecode omitted; configure() reads "number_nodes" and the reducer count ("MapHistogram configure(): number_nodes = ..., nreducers=..."); map() takes the first tab-delimited column, hashes it (hashCode / Math.abs / mod) to a bucket, and emits (IntWritable bucket, Text line)]
PEGASUS/classes/pegasus/PagerankBlock.class
    [driver bytecode omitted; block-method PageRank ("[PEGASUS] Computing PageRank using block method. Max iteration = ..., threshold = ...") with inner classes MapStage1/RedStage1, MapStage2/RedStage2, MapStage25 and a PrCounters enum; fields include edge_path, vector_path, tempmv_path, output_path, vector_unfold_path, minmax_path, distr_path, number_nodes, niteration, mixing_c, nreducers, make_symmetric, block_width; configStage1()..configStage4() build jobs Pagerank_Stage1..Pagerank_Stage4, the last two reusing PagerankNaive's MapStage3/RedStage3 and MapStage4/RedStage4 for the min/max scan and the 1000-bin histogram; run() iterates until PrCounters.CONVERGE_CHECK reports no change ("Iteration = ..., changed reducer = ..."), then unfolds the block vector and writes pr_vector, pr_minmax ("min = ..., max = ...") and pr_distr]
PEGASUS/classes/pegasus/Hadi$MapStage2.class
    [compiled bytecode omitted; a pass-through mapper: splits each line on tab and emits (IntWritable(line[0]), Text(line[1]))]
PEGASUS/classes/pegasus/Hadi$MapStage4.class
    [compiled bytecode omitted; map() parses a node's per-hop neighborhood record (values prefixed "bsf"), tracking cur_hop/cur_nh against max_nh and a ninety_th threshold to find the maximum radius and the 90th-percentile effective radius, formatted with DecimalFormat("#.##")]
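The PagerankBlock driver's strings imply a conventional convergence loop. The sketch below reconstructs it; the field names, the PrCounters.CONVERGE_CHECK counter and the printed messages come straight from the pool, but the control flow and the path rotation between iterations are assumptions.

// Hedged sketch of the iteration loop inside PagerankBlock.run().
for (int i = 0; i < niteration; i++) {
  JobClient.runJob(configStage1());                  // Pagerank_Stage1: block matrix-vector multiply
  RunningJob job = JobClient.runJob(configStage2()); // Pagerank_Stage2: merge + convergence test
  long changed = job.getCounters().getCounter(PrCounters.CONVERGE_CHECK);
  System.out.println("Iteration = " + i + ", changed reducer = " + changed);

  fs.delete(vector_path);                 // drop the old vector
  fs.rename(tempmv_path, vector_path);    // promote the new one (rotation assumed)

  if (changed == 0) {
    System.out.println("PageRank vector converged. Now preparing to finish...");
    break;
  }
}
// Afterwards: unfold the block vector (configStage25), then run the
// min/max and histogram jobs (configStage3 / configStage4).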
org/apache/hadoop/io/IntWritable !org/apache/hadoop/io/Textjava/lang/StringBuilderbsf } !org/apache/hadoop/io/LongWritable *+pegasus/Hadi$MapStage4&org/apache/hadoop/mapred/MapReduceBaseorg/apache/hadoop/mapred/Mapper(org/apache/hadoop/mapred/OutputCollector!org/apache/hadoop/mapred/Reporterjava/lang/Stringjava/io/IOExceptiontoString()Ljava/lang/String;split'(Ljava/lang/String;)[Ljava/lang/String; substring(I)Ljava/lang/String;length()Ijava/lang/IntegerparseInt(Ljava/lang/String;)Ijava/lang/Double parseDouble(Ljava/lang/String;)D(Ljava/lang/String;)V(I)Vappend-(Ljava/lang/String;)Ljava/lang/StringBuilder;(I)Ljava/lang/StringBuilder;format(D)Ljava/lang/String;collect'(Ljava/lang/Object;Ljava/lang/Object;)V pegasus/Hadi! !"#/*$F% &)*+#,:2:699 2:   :  ~  d2 69  d2 99  k9 d6G 2 6 `2 999 g goc9  Y:-Y2 YY$jL MNOPR)S1T:UAVNWSX`YdZl[x\]_`acd[ikm%4,-(./sI0-`\1/lP2/:34 >56&)789:;<=> ?4@4A-B/C/ )DE F ;GH8sIJKLMNNON.HPQRSA*T#b*+,-$F%4&)UVWVX<Y>PQRZ[\( y' PEGASUS/classes/pegasus/PegasusUtils$RedSumLongText.class0000644000000000000000000000525611443145620022327 0ustar rootroot2k > ?@ ?AB C DE FGH >I J K C L MNO PRST()VCodeLineNumberTableLocalVariableTablethisRedSumLongText InnerClasses%Lpegasus/PegasusUtils$RedSumLongText;reduce(Lorg/apache/hadoop/io/LongWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vstr_valLjava/lang/String;cur_valJkey#Lorg/apache/hadoop/io/LongWritable;valuesLjava/util/Iterator;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;sumLocalVariableTypeTable1Ljava/util/Iterator;jLorg/apache/hadoop/mapred/OutputCollector; StackMapTable ExceptionsU Signature(Lorg/apache/hadoop/io/LongWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFilePegasusUtils.java V WX YZorg/apache/hadoop/io/Text [\] ^_` abjava/lang/StringBuilderv cd ce fg hi!org/apache/hadoop/io/LongWritable j#pegasus/PegasusUtils$RedSumLongText&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/io/IOExceptionjava/util/IteratorhasNext()Znext()Ljava/lang/Object;toString()Ljava/lang/String;java/lang/String substring(I)Ljava/lang/String;java/lang/Long parseLong(Ljava/lang/String;)Jappend-(Ljava/lang/String;)Ljava/lang/StringBuilder;(J)Ljava/lang/StringBuilder;(Ljava/lang/String;)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/PegasusUtils!/*)  R 7,&,:7a7-+YY   "-/ 01%2,3/5Q6R !%"#RR$%R&'R()R*+O,#-R&.R(/0+1234A5_ *+,-)4  67 8' 9) :+123;<= Q PEGASUS/classes/pegasus/PagerankPrep$MapStage1.class0000644000000000000000000000631711443145620021153 0ustar rootroot2 N O PQ RS TUV NW X Y Z [\ Z] ^_` ^ab cd e fgh iklmmake_symmetricI()VCodeLineNumberTableLocalVariableTablethis MapStage1 InnerClasses Lpegasus/PagerankPrep$MapStage1; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;map(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vkey#Lorg/apache/hadoop/io/LongWritable;valueLorg/apache/hadoop/io/Text;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter; 
line_textLjava/lang/String;line[Ljava/lang/String;src_iddst_idLocalVariableTypeTableiLorg/apache/hadoop/mapred/OutputCollector; StackMapTablen9 Exceptionso Signature(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vt(Ljava/lang/Object;Ljava/lang/Object;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Mapper; SourceFilePagerankPrep.java  p qrs tuv wxjava/lang/StringBuilderMapStage1 : make_symmetric = yz y{ |}~ #n   org/apache/hadoop/io/IntWritable org/apache/hadoop/io/Text  !org/apache/hadoop/io/LongWritable ,-pegasus/PagerankPrep$MapStage1&org/apache/hadoop/mapred/MapReduceBaseorg/apache/hadoop/mapred/Mapperjava/lang/Stringjava/io/IOException org/apache/hadoop/mapred/JobConfget&(Ljava/lang/String;)Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)Ijava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(I)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/io/PrintStreamprintln(Ljava/lang/String;)V startsWith(Ljava/lang/String;)Zsplit'(Ljava/lang/String;)[Ljava/lang/String;(I)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/PagerankPrep! !8 **" (*#  $'()!f**+Y * "- /)0#*$'**+,-!B q,::2626-YY2*-YY2"2 45689!:"<+=4>N@VApB#\ q$'q./q01q23q45k67W89+F:4=;< q2=>?@MABCDA,E!b*+,-"(#4$'FGHGI3J5ABCKLM& j% PEGASUS/classes/pegasus/HadiResultInfo.class0000644000000000000000000000053011443145616017701 0ustar rootroot2 nhFconverged_nodesI changed_nodes()VCodeLineNumberTableLocalVariableTablethisLpegasus/HadiResultInfo; SourceFile Hadi.java pegasus/HadiResultInfojava/lang/Object   /* ! PEGASUS/classes/pegasus/RWRNaive$MapStage3.class0000644000000000000000000000525711443145620020235 0ustar rootroot2m BC B D EF GHI GJ GK LM NO P QRST UWXY from_node_int"Lorg/apache/hadoop/io/IntWritable;()VCodeLineNumberTableLocalVariableTablethis MapStage3 InnerClassesLpegasus/RWRNaive$MapStage3;map(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vkey#Lorg/apache/hadoop/io/LongWritable;valueLorg/apache/hadoop/io/Text;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter; line_textLjava/lang/String;line[Ljava/lang/String;rwrDLocalVariableTypeTablesLorg/apache/hadoop/mapred/OutputCollector; StackMapTableZ Exceptions[ Signature(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vt(Ljava/lang/Object;Ljava/lang/Object;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Mapper; SourceFile RWRNaive.java  org/apache/hadoop/io/IntWritable  \]#Z ^_ `a bcd ef g#org/apache/hadoop/io/DoubleWritable hi jk!org/apache/hadoop/io/LongWritableorg/apache/hadoop/io/Text !"lpegasus/RWRNaive$MapStage3&org/apache/hadoop/mapred/MapReduceBaseorg/apache/hadoop/mapred/Mapperjava/lang/Stringjava/io/IOExceptiontoString()Ljava/lang/String; startsWith(Ljava/lang/String;)Zsplit'(Ljava/lang/String;)[Ljava/lang/String; substring(I)Ljava/lang/String;java/lang/Double parseDouble(Ljava/lang/String;)D(I)V(D)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/RWRNaive!>**Y   !" 
V,: :2 9-Y Y-Y Y"'>URV V#$V%&V'(V)*P+,<-.'//01 V'2345678A!9b*+,-4 :;<;=(>*567?@A V PEGASUS/classes/pegasus/HadiIVGen$RedStage1.class0000644000000000000000000001051211443145617020325 0ustar rootroot2 *s )t )u )v, wx yz./ {|} s~     )   @   y ) number_nodesI nreplicationencode_bitmask()VCodeLineNumberTableLocalVariableTablethis RedStage1 InnerClassesLpegasus/HadiIVGen$RedStage1; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;reduce(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)V new_bitmaskLjava/lang/String;icur_textLorg/apache/hadoop/io/Text;line[Ljava/lang/String; start_nodeend_nodekey"Lorg/apache/hadoop/io/IntWritable;valuesLjava/util/Iterator;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;LocalVariableTypeTable1Ljava/util/Iterator;iLorg/apache/hadoop/mapred/OutputCollector; StackMapTableE Exceptions Signature(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vgenerate_bitmask(II)Ljava/lang/String;encoded_bitmask number_nodeK size_bitmaskbitmaskbm_array[Igv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFileHadiIVGen.java 01 ,- .- /-   java/lang/StringBuilderRedStage1 : number_nodes = , nreplication = , encode_bitmask =   org/apache/hadoop/io/Text  _` org/apache/hadoop/io/IntWritable 0 0  vi0:0:1 ~  =>pegasus/HadiIVGen$RedStage1&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/util/Iterator(org/apache/hadoop/mapred/OutputCollector!org/apache/hadoop/mapred/Reporterjava/io/IOExceptionjava/lang/String org/apache/hadoop/mapred/JobConfget&(Ljava/lang/String;)Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)Ijava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(I)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/io/PrintStreamprintln(Ljava/lang/String;)VhasNext()Znext()Ljava/lang/Object;split'(Ljava/lang/String;)[Ljava/lang/String;(I)Vcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vjava/lang/Mathlog(D)Dceilpegasus/FMBitmaskcreate_random_bm(II)I toHexString(I)Ljava/lang/String;pegasus/BitShuffleCoderencode_bitmasks([II)Ljava/lang/String;pegasus/HadiIVGen!)*+,-.-/-012J****38: ;<4 589:2\*+*+*+  Y  ***3? @A'C[D4\58\;<=>2m m,f,::26266  /***: -Y Y  Ч3. J KL N)O2Q=RKScQiUlV4p K?@ 63A- UBC IDE)CF-2:G-m58mHImJKmLMmNOPmJQmLRS(5 TUVWXYZ2[\]^_`2I o"6#: :>@*$O% Y %$&:*&': Y %:32 \]^`"a*b7dY`_gghojm4Roa@58b-c-pA-{d-we@rfgSThi!*A=j2_ *+,-(3844 58 kl mK nM oO[\]pqr7 )6 PEGASUS/classes/pegasus/ConCmpt$MapStage4.class0000644000000000000000000000455211443145617020147 0ustar rootroot2Y 6 78 9:; 9< => ? 
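RedSumLongText is another utility whose pool fixes nearly every detail: values arrive as Text of the form "v<long>", are summed as longs, and re-emitted with the same prefix. A reconstruction, with only names outside the pool guessed:

import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

// Static inner class of PegasusUtils in the original.
class RedSumLongText extends MapReduceBase
    implements Reducer<LongWritable, Text, LongWritable, Text> {
  public void reduce(LongWritable key, Iterator<Text> values,
                     OutputCollector<LongWritable, Text> output,
                     Reporter reporter) throws IOException {
    long sum = 0;
    while (values.hasNext()) {
      String str_val = values.next().toString();
      long cur_val = Long.parseLong(str_val.substring(1));  // strip the 'v'
      sum += cur_val;
    }
    output.collect(key, new Text("v" + sum));
  }
}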
PEGASUS/classes/pegasus/DegDist.class
    [driver bytecode omitted; degree-distribution Tool with fields edge_path, node_deg_path, deg_count_path, nreducer and a deg_type of In, Out or InOut ("[PEGASUS] Computing degree distribution. Degree type = ..."); configPass1() counts per-node degree (MapPass1/RedPass1, with RedPass1 doubling as combiner), configPass2() counts nodes per degree (MapPass2/RedPass2); prints "(NodeId, Degree) is saved in HDFS ..., (Degree, Count) is saved in HDFS ..."]
PEGASUS/classes/pegasus/Saxpy$RedStage1.class
    [compiled bytecode omitted; sums the (at most two) incoming doubles per node into the saxpy result a*x + y, buffering them in a val_double[] local; see the reconstruction below]
PEGASUS/classes/pegasus/PagerankPrep.class
    [driver bytecode omitted; converts an edge list to the column-normalized adjacency matrix ("[PEGASUS] Converting the adjacency matrix to column-normalized format."), wiring MapStage1/RedStage1 with a "makesym" option and printing "[PEGASUS] Conversion finished." when the normalized matrix is in HDFS]
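Saxpy$RedStage1 completes the a*x + y computation: MapStage1 has already scaled the x-side records by a, so the reducer only needs to add the two per-node contributions. A reconstruction; the two-element buffer mirrors the val_double[] local visible in the bytecode, and handling of nodes present in only one input is assumed to fall out of the zero-initialized array.

import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

// Inner class of Saxpy in the original: emits y[i] + a*x[i] per node.
class RedStage1 extends MapReduceBase
    implements Reducer<IntWritable, DoubleWritable, IntWritable, DoubleWritable> {
  public void reduce(IntWritable key, Iterator<DoubleWritable> values,
                     OutputCollector<IntWritable, DoubleWritable> output,
                     Reporter reporter) throws IOException {
    int i = 0;
    double[] val_double = new double[2];   // one slot per input vector
    while (values.hasNext())
      val_double[i++] = values.next().get();
    double result = val_double[0] + val_double[1];
    output.collect(key, new DoubleWritable(result));
  }
}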
+18?NV^elGnHI_YZOP[\;/: -< PEGASUS/classes/pegasus/ConCmpt$RedStage2.class0000644000000000000000000000655511443145617020147 0ustar rootroot2 VWX YZ Y[\ ] ^_ `ab cde Vf gh ] ij ^k l `m n opq rtuv()VCodeLineNumberTableLocalVariableTablethis RedStage2 InnerClassesLpegasus/ConCmpt$RedStage2;reduce(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)VexLjava/lang/Exception; cur_ci_stringLjava/lang/String; cur_nodeidIkey"Lorg/apache/hadoop/io/IntWritable;valuesLjava/util/Iterator;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;out_val bSelfChangedZchanged_prefixCcomplete_cistringcur_min_nodeidself_min_nodeidLocalVariableTypeTable1Ljava/util/Iterator;iLorg/apache/hadoop/mapred/OutputCollector; StackMapTabletqwxyzb Exceptions{ Signature(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFile ConCmpt.java msw |} ~org/apache/hadoop/io/Text z  java/lang/Exception java/lang/StringBuilderException! cur_ci_string=[ ] x  org/apache/hadoop/io/IntWritable &'pegasus/ConCmpt$RedStage2&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/util/Iterator(org/apache/hadoop/mapred/OutputCollector!org/apache/hadoop/mapred/Reporterjava/lang/Stringjava/io/IOExceptionhasNext()Znext()Ljava/lang/Object;toString()Ljava/lang/String; substring(I)Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)Ijava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;java/io/PrintStreamprintln(Ljava/lang/String;)VcharAt(I)C(C)Ljava/lang/StringBuilder;collect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/ConCmpt!/* ! 
"%&'W:6x6: 6 6 ,m,: 6   6 $: Y   s 6    6    6   f6i6 Y  :-+Y/:=  j ,/:=?^imsz!?(),Y*+ /V,- "%./0123456+78 9:;+ <- =- >0?2@AT  BCDEFGG'BCDEFGGGH   IJKLA&M_ *+,- !4 "% NO P1 Q3 R5IJKSTU$ s# PEGASUS/classes/pegasus/ConCmptBlock$RedStage2.class0000644000000000000000000001063711443145617021116 0ustar rootroot2 %g' hi j $k lmn go p q r st uv uwx r yz y{| }~     } }8 }  $ block_widthI()VCodeLineNumberTableLocalVariableTablethis RedStage2 InnerClasses Lpegasus/ConCmptBlock$RedStage2; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;reduce(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Viv_elemLpegasus/VectorElem;cur_strLjava/lang/String; cur_vectorLjava/util/ArrayList; vector_iterLjava/util/Iterator;key"Lorg/apache/hadoop/io/IntWritable;valuesoutput*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter; self_vectorout_vals[I new_vector isDifferent out_prefixLocalVariableTypeTable)Lpegasus/VectorElem;@Ljava/util/ArrayList;>;?Ljava/util/Iterator;>;1Ljava/util/Iterator;iLorg/apache/hadoop/mapred/OutputCollector; StackMapTableJ Exceptions Signature(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFileConCmptBlock.java )* '( java/lang/StringBuilderRedStage2: block_width=   org/apache/hadoop/io/Text java/lang/Integer  pegasus/VectorElem ` msf   org/apache/hadoop/io/IntWritable 67pegasus/ConCmptBlock$RedStage2&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/util/ArrayListjava/lang/Stringjava/util/Iteratorjava/io/IOException org/apache/hadoop/mapred/JobConfget&(Ljava/lang/String;)Ljava/lang/String;parseInt(Ljava/lang/String;)Ijava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(I)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/io/PrintStreamprintln(Ljava/lang/String;)VhasNext()Znext()Ljava/lang/Object;charAt(I)C substring(I)Ljava/lang/String; pegasus/GIMVparseVectorVal:(Ljava/lang/String;Ljava/lang/Class;)Ljava/util/ArrayList;iterator()Ljava/util/Iterator;rowSvalintValue()ImakeIntVectors([II)Ljava/util/ArrayList;compareVectors-(Ljava/util/ArrayList;Ljava/util/ArrayList;)IformatVectorElemOutputD(Ljava/lang/String;Ljava/util/ArrayList;)Lorg/apache/hadoop/io/Text;(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/ConCmptBlock!$%&'()*+/*,- .123+f**+Y * , )-*.1*4567+ +:* :6*O,,:s:::  ] :  .  O, .   
OW*:6: Y  : Y  : -+  !,n #,:ESahr~*-8(~K9: :;<ak=>hd?@ +.1+AB+C@+DE+FG(H>  IJQK>HL(DM< NH~K9O ak=Phd?Q +CR+DS(HPQKPT6 UV/WUX7Y(6UWZ[\]A6^+_ *+",-#,-4 .1 _` a@ bE cGZ[\def0 $/ PEGASUS/classes/pegasus/PegasusUtils$IdentityPartition.class0000644000000000000000000000275611443145620023130 0ustar rootroot26 + ,- .012()VCodeLineNumberTableLocalVariableTablethisIdentityPartition InnerClasses(Lpegasus/PegasusUtils$IdentityPartition;LocalVariableTypeTable.Lpegasus/PegasusUtils$IdentityPartition; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf; getPartition8(Lorg/apache/hadoop/io/IntWritable;Ljava/lang/Object;I)Ikey"Lorg/apache/hadoop/io/IntWritable;valueLjava/lang/Object;numReduceTasksIcand_partitionTV2; StackMapTable Signature*(Lorg/apache/hadoop/io/IntWritable;TV2;I)I((Ljava/lang/Object;Ljava/lang/Object;I)Ix0x1x2wLjava/lang/Object;Lorg/apache/hadoop/mapred/Partitioner; SourceFilePegasusUtils.java  34 org/apache/hadoop/io/IntWritable 5&pegasus/PegasusUtils$IdentityPartitionjava/lang/Object$org/apache/hadoop/mapred/Partitionerget()Ipegasus/PegasusUtils!  A*      G      +6d   4    !"#A$ e *+,  *  % & '  "()* / PEGASUS/classes/pegasus/RWRBlock$RedStage1.class0000644000000000000000000001362211443145620020213 0ustar rootroot2 35 " 2            "   "  &  &   .  2 block_widthI()VCodeLineNumberTableLocalVariableTablethis RedStage1 InnerClassesLpegasus/RWRBlock$RedStage1; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;reduce(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)V vector_strLjava/lang/String;fcC block_row line_textline[Ljava/lang/String;elemLpegasus/VectorElem;cur_mult_result_iterLjava/util/Iterator; cur_blockLjava/util/ArrayList; cur_block_rowcur_mult_resultcur_block_outputkey"Lorg/apache/hadoop/io/IntWritable;valuesoutput*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter; vector_valF to_nodes_list to_val_list vectorArrblockArr blockRowArr blockCount self_outputLorg/apache/hadoop/io/Text; blockArrIter blockRowIterLocalVariableTypeTable(Lpegasus/VectorElem;>Ljava/util/Iterator;>;>Ljava/util/ArrayList;>;?Ljava/util/ArrayList;>;1Ljava/util/Iterator;iLorg/apache/hadoop/mapred/OutputCollector;*Ljava/util/ArrayList;(Ljava/util/ArrayList;ULjava/util/ArrayList;>;>;TLjava/util/Iterator;>;>;)Ljava/util/Iterator; StackMapTableM Exceptions Signature(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFile RWRBlock.java 78 56 java/lang/StringBuilderRedStage1: block_width=  java/util/ArrayList org/apache/hadoop/io/Text  java/lang/Double s  java/lang/Integer opegasus/VectorElem    org/apache/hadoop/io/IntWritable 7 7 DEpegasus/RWRBlock$RedStage1&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/util/Iterator(org/apache/hadoop/mapred/OutputCollector!org/apache/hadoop/mapred/Reporterjava/lang/Stringjava/io/IOException 
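The connected-components reducers above all implement one step of label propagation: a node keeps the minimum component id among its own and its neighbors' candidates. Below is a minimal sketch of that reduce step, assuming one-character value tags ('s' self, 'o' other, 'c' changed, 'f' final) of the kind the constant pools hint at; all identifiers are reconstructions, not the original source.

import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

// Sketch: one label-propagation step. Each value is assumed to be a component id
// behind a one-char tag: 's' = the node's current id, 'o' = a neighbor's candidate.
public class MinLabelReducer extends MapReduceBase
    implements Reducer<IntWritable, Text, IntWritable, Text> {

  public void reduce(IntWritable key, Iterator<Text> values,
                     OutputCollector<IntWritable, Text> output, Reporter reporter)
      throws IOException {
    int selfId = Integer.MAX_VALUE;
    int minId = Integer.MAX_VALUE;
    while (values.hasNext()) {
      String v = values.next().toString();
      int id = Integer.parseInt(v.substring(1));
      if (v.charAt(0) == 's') selfId = id;
      if (id < minId) minId = id;
    }
    // 'c' marks a changed id, 'f' a final (unchanged) one -- tag names assumed.
    char prefix = (minId < selfId) ? 'c' : 'f';
    output.collect(key, new Text(prefix + Integer.toString(minId)));
  }
}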
PEGASUS/classes/pegasus/HadiBlock$MapStage1.class
    [compiled class — binary payload omitted. Recoverable interface: mapper HadiBlock.MapStage1 implements Mapper<LongWritable, Text, IntWritable, Text>; tab-splits each input line, skips "#" comment lines, and emits IntWritable(first column) -> Text(rest) for both matrix and vector records.]
PEGASUS/classes/pegasus/DegDist$RedPass1.class
    [compiled class — binary payload omitted. Recoverable interface: reducer DegDist.RedPass1 with IntWritable one_int and int deg_type (configure() prints "RedPass1 : configure is called. degtype ="); in the InOutDeg case it deduplicates a node's edges in a TreeSet before emitting the node's degree with count 1.]
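DegDist.RedPass1 above computes each node's degree; a companion summing pass then turns the emitted 1s into (degree, frequency) pairs, i.e. the degree distribution. A hedged sketch of the first pass — class and field names are reconstructions:

import java.io.IOException;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

// Sketch: pass 1 of a degree distribution. Emits (degree, 1) per node;
// a second summing pass turns these into (degree, frequency).
public class DegreeReducer extends MapReduceBase
    implements Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {
  private final IntWritable one = new IntWritable(1);

  public void reduce(IntWritable node, Iterator<IntWritable> neighbors,
                     OutputCollector<IntWritable, IntWritable> output, Reporter reporter)
      throws IOException {
    Set<Integer> distinct = new TreeSet<Integer>();  // the payload shows a TreeSet here
    while (neighbors.hasNext())
      distinct.add(neighbors.next().get());
    output.collect(new IntWritable(distinct.size()), one);
  }
}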
!^ !_ !` !a& bc de( fgh [i j kl m no mp qrs qt qu v wxyz !{}~ from_node_int"Lorg/apache/hadoop/io/IntWritable;min_rwrDmax_rwrgap_rwr hist_widthI()VCodeLineNumberTableLocalVariableTablethis MapStage4 InnerClassesLpegasus/RWRNaive$MapStage4; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;map(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vkey#Lorg/apache/hadoop/io/LongWritable;valueLorg/apache/hadoop/io/Text;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter; line_textLjava/lang/String;line[Ljava/lang/String;rwr distr_indexLocalVariableTypeTablepLorg/apache/hadoop/mapred/OutputCollector; StackMapTableF Exceptions Signature(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vt(Ljava/lang/Object;Ljava/lang/Object;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Mapper; SourceFile RWRNaive.java ,- org/apache/hadoop/io/IntWritable $% &' (' )' *+   java/lang/StringBuilderMapStage4: min_rwr =  , max_rwr =  #  , !org/apache/hadoop/io/LongWritableorg/apache/hadoop/io/Text 9:pegasus/RWRNaive$MapStage4&org/apache/hadoop/mapred/MapReduceBaseorg/apache/hadoop/mapred/Mapperjava/lang/Stringjava/io/IOException org/apache/hadoop/mapred/JobConfget&(Ljava/lang/String;)Ljava/lang/String;java/lang/Double parseDouble(Ljava/lang/String;)Djava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(D)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/io/PrintStreamprintln(Ljava/lang/String;)V startsWith(Ljava/lang/String;)Zsplit'(Ljava/lang/String;)[Ljava/lang/String; substring(I)Ljava/lang/String;(I)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/RWRNaive!!"#$%&'(')'*+,-.d&**Y****/0 &1456.P*+ *+ ***g Y**/ 'O0P14P789:.- g,::2 9**gk*o`6  *` *6 -Y Y/*   ' > I Of0\ g14g;<g=>g?@gABaCDMEF'@G'>)H+ I g?JKL=MNOPQA9R.b*+,- /0414STUTV@WBNOPXYZ3 !|2 PEGASUS/classes/pegasus/ConCmptIVGen.class0000644000000000000000000001206411443145617017263 0ustar rootroot2 J                    ( * *  *   8R 8 8 8 8 8 8 8 RedStage1 InnerClasses MapStage1 input_pathLorg/apache/hadoop/fs/Path; output_path number_nodesInumber_reducersfs!Lorg/apache/hadoop/fs/FileSystem;()VCodeLineNumberTableLocalVariableTablethisLpegasus/ConCmptIVGen;main([Ljava/lang/String;)Vargs[Ljava/lang/String;result Exceptions printUsage()Irun([Ljava/lang/String;)I StackMapTable gen_cmd_file (IILorg/apache/hadoop/fs/Path;)Vend_node start_node num_nodes num_reducersi file_nameLjava/lang/String;fileLjava/io/FileWriter;outLjava/io/BufferedWriter;step configStage1$()Lorg/apache/hadoop/mapred/JobConf;conf"Lorg/apache/hadoop/mapred/JobConf; SourceFileConCmptIVGen.java WX OP QP RS TS$org/apache/hadoop/conf/Configurationpegasus/ConCmptIVGen g u7ConCmptIVGen <# of nodes> <# of machines> eforg/apache/hadoop/fs/Pathcc_ivcmd W = -----===[PEGASUS: A Peta-Scale Graph Mining System]===----- java/lang/StringBuilder3[PEGASUS] Generating initial vector. 
PEGASUS/classes/pegasus/ConCmptIVGen.class
    [compiled class — binary payload omitted. Recoverable interface: driver pegasus.ConCmptIVGen extends Configured implements Tool; fields Path input_path, output_path; int number_nodes, number_reducers; FileSystem fs; methods main(String[]), printUsage(), run(String[]), gen_cmd_file(int, int, Path), configStage1(); inner classes MapStage1, RedStage1; job name "ConCmptIVGen_Stage1". Writes a local "component_iv.temp" command file headed "# component vector file from ConCmptIVGen" / "# number of nodes in graph =", copies it into HDFS, then runs stage 1. Console strings: "ConCmptIVGen <# of nodes> <# of machines>", "[PEGASUS] Generating initial vector. Output path = , Number of nodes = , Number of machines =", "creating initial vector generation cmd...", "[PEGASUS] Initial connected component vector generated in HDFS".]
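gen_cmd_file above splits the node id space into one contiguous range per task so that a trivial MapReduce job can expand each range into initial vector entries, every node starting in its own component. A sketch of the splitting step, assuming even ranges and a tab-separated "start end" line format (neither is confirmed by the payload):

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class GenCmdFileSketch {
  // Write one node range per task; a later MR job expands range [start, end]
  // into initial entries where node n starts in its own component n.
  public static void genCmdFile(int numNodes, int numTasks, String path)
      throws IOException {
    BufferedWriter out = new BufferedWriter(new FileWriter(path));
    out.write("# component vector file from ConCmptIVGen\n");
    out.write("# number of nodes in graph = " + numNodes + "\n");
    int step = numNodes / numTasks;
    for (int i = 0; i < numTasks; i++) {
      int start = i * step;
      int end = (i == numTasks - 1) ? numNodes - 1 : (i + 1) * step - 1;
      out.write(start + "\t" + end + "\n");  // separator format assumed
    }
    out.close();
  }
}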
+18?NV^el[n\]_cdMAL ?N PEGASUS/classes/pegasus/PegasusUtils$RedHistogram.class0000644000000000000000000000512411443145620022025 0ustar rootroot2Y : ;< => ?@ ?AB C DE FHIJ partition_noI()VCodeLineNumberTableLocalVariableTablethis RedHistogram InnerClasses#Lpegasus/PegasusUtils$RedHistogram;LocalVariableTypeTable)Lpegasus/PegasusUtils$RedHistogram; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;reduce(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vkey"Lorg/apache/hadoop/io/IntWritable;valuesLjava/util/Iterator;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;countLjava/util/Iterator;pLorg/apache/hadoop/mapred/OutputCollector; StackMapTable ExceptionsK Signature(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFilePegasusUtils.java  mapred.task.partitionL MNO PQ RS org/apache/hadoop/io/IntWritable TU VW  X!pegasus/PegasusUtils$RedHistogram&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/io/IOException org/apache/hadoop/mapred/JobConfgetInt(Ljava/lang/String;I)Ijava/util/IteratorhasNext()Znext()Ljava/lang/Object;(I)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/PegasusUtils!  J **     V *+        *6,,W-+Y   )>**!"*#$*%&*'(') **#**%+,-./0A1q *+,- 4  23 4$ 5& 6(  -./789  G PEGASUS/classes/pegasus/ConCmpt$RedStage3.class0000644000000000000000000000453611443145617020145 0ustar rootroot2Y 7 89 8:; < => =? @ AB CEFG()VCodeLineNumberTableLocalVariableTablethis RedStage3 InnerClassesLpegasus/ConCmpt$RedStage3;reduce(Lorg/apache/hadoop/io/Text;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)VlineLjava/lang/String; cur_valueIkeyLorg/apache/hadoop/io/Text;valuesLjava/util/Iterator;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;sumLocalVariableTypeTable1Ljava/util/Iterator;bLorg/apache/hadoop/mapred/OutputCollector; StackMapTable ExceptionsH Signature(Lorg/apache/hadoop/io/Text;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFile ConCmpt.java I JK LMorg/apache/hadoop/io/Text NOP QR NS TU VW Xpegasus/ConCmpt$RedStage3&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/io/IOExceptionjava/util/IteratorhasNext()Znext()Ljava/lang/Object;toString()Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)I(I)Ljava/lang/String;(Ljava/lang/String;)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/ConCmpt!  /*  ?6,",:6`6-+Y " !(+>R!??? ?!"?#$<%&?'?!()'*+,-A._ *+,-  4  /0 1  2" 3$*+,456  D PEGASUS/classes/pegasus/ScalarMult$MapStage1Text.class0000644000000000000000000000675211443145620021513 0ustar rootroot2 "^_ ^ !` !a !b) cd ef !g hij ^k l m n op nq rs rt uv rw rx yz{ | }~ ! 
PEGASUS/classes/pegasus/ScalarMult$MapStage1Text.class
    [compiled class — binary payload omitted. Recoverable interface: mapper ScalarMult.MapStage1Text with IntWritable from_node_int, booleans isYpath/isXpath, and double s (configure() prints "ScalarMult.MapStage1: s ="); parses node/value lines and emits each vector element multiplied by s.]
PEGASUS/classes/pegasus/ConCmpt$RedStage4.class
    [compiled class — binary payload omitted. Recoverable interface: reducer ConCmpt.RedStage4; sums IntWritable counts per key, yielding the component-size histogram.]
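The ScalarMult mapper above is a one-pass scale of a distributed vector. A sketch, under the assumptions that input lines are "node<TAB>value", that vector values carry the 'v' marker seen elsewhere in these payloads, and that the scalar arrives through a JobConf key named "s":

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

// Sketch: scale every vector element by s. The "s" JobConf key and the
// leading 'v' value marker are assumptions.
public class ScalarMultMapper extends MapReduceBase
    implements Mapper<LongWritable, Text, IntWritable, Text> {
  private double s = 1.0;

  public void configure(JobConf job) {
    s = Double.parseDouble(job.get("s"));
  }

  public void map(LongWritable key, Text value,
                  OutputCollector<IntWritable, Text> output, Reporter reporter)
      throws IOException {
    String line = value.toString();
    int tab = line.indexOf("\t");
    int node = Integer.parseInt(line.substring(0, tab));
    String val = line.substring(tab + 1);
    if (val.charAt(0) == 'v') val = val.substring(1);  // strip vector marker
    double scaled = Double.parseDouble(val) * s;
    output.collect(new IntWritable(node), new Text("v" + scaled));
  }
}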
PEGASUS/classes/pegasus/SaxpyTextoutput.class
    [compiled class — binary payload omitted. Recoverable interface: driver pegasus.SaxpyTextoutput extends Configured implements Tool; int nreducers; method configSaxpyTextoutput(Path y, Path x, Path out, double a); inner classes MapStage1, RedStage1. Console strings: "SaxpyTextoutput <# of reducers>", "[PEGASUS] Computing SaxpyTextoutput. y_path= , x_path=, a=", "saxpy(): output path name is same as the input path name: changing the output path name to saxpy_output1", "[PEGASUS] SaxpyTextoutput computed. Output is saved in HDFS".]
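The job computes y <- y + a*x over two HDFS vectors: the mapper tags each record by whether it came from the y path or the x path, and the reducer combines the two aligned components. A sketch of the reduce side; the tag characters are assumptions:

import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

// Sketch: combine one component of y and x into y + a*x.
public class SaxpyReducer extends MapReduceBase
    implements Reducer<IntWritable, Text, IntWritable, DoubleWritable> {
  private double a = 1.0;

  public void configure(JobConf job) {
    a = Double.parseDouble(job.get("a"));  // key name assumed
  }

  public void reduce(IntWritable key, Iterator<Text> values,
                     OutputCollector<IntWritable, DoubleWritable> output, Reporter reporter)
      throws IOException {
    double y = 0.0, x = 0.0;
    while (values.hasNext()) {
      String v = values.next().toString();      // assumed tags: 'y'<val>, 'x'<val>
      double d = Double.parseDouble(v.substring(1));
      if (v.charAt(0) == 'y') y = d; else x = d;
    }
    output.collect(key, new DoubleWritable(y + a * x));
  }
}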
PEGASUS/classes/pegasus/Hadi$EdgeType.class
    [compiled class — binary payload omitted. Recoverable interface: enum Hadi.EdgeType { Regular, Inverted }.]
PEGASUS/classes/pegasus/ConCmptBlock$MapStage1.class
    [compiled class — binary payload omitted. Recoverable interface: mapper ConCmptBlock.MapStage1; tab-splits each line, skips "#" comments, and emits IntWritable(block id) -> Text(payload), distinguishing two- and three-column records.]
PEGASUS/classes/pegasus/MatvecUtils.class
    [compiled class — binary payload omitted, though its logic is fully legible: static double[] decodeBlockVector(String, int block_width) and static String encodeBlockVector(double[], int block_width) convert between a "row value row value ..." string and a dense block vector; reconstructed below.]
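A reconstruction of MatvecUtils from the visible string operations (split on space, Short.parseShort/Double.parseDouble over alternating tokens, a StringBuilder of "row value" pairs). The null return for an all-zero vector is an assumption:

public class MatvecUtilsSketch {
  // "r0 v0 r1 v1 ..." -> dense vector of length blockWidth.
  public static double[] decodeBlockVector(String strVal, int blockWidth) {
    double[] vector = new double[blockWidth];
    String[] tokens = strVal.split(" ");
    for (int i = 0; i < tokens.length; i += 2) {
      short row = Short.parseShort(tokens[i]);
      double val = Double.parseDouble(tokens[i + 1]);
      vector[row] = val;
    }
    return vector;
  }

  // Dense vector -> "row value" pairs for the nonzero entries.
  public static String encodeBlockVector(double[] vec, int blockWidth) {
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < blockWidth; i++) {
      if (vec[i] == 0.0) continue;
      if (result.length() > 0) result.append(" ");
      result.append(i).append(" ").append(vec[i]);
    }
    return result.length() > 0 ? result.toString() : null;  // null-if-empty assumed
  }
}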
PEGASUS/classes/pegasus/HadiBlock$MapStage3.class
    [compiled class — binary payload omitted. Recoverable interface: mapper HadiBlock.MapStage3 with IntWritable zero_id, Text output_val, int nreplication, int encode_bitmask (configure() prints "MapStage3 : nreplication = , encode_bitmask="); decodes each node's Flajolet-Martin bitmasks (BitShuffleCoder.decode_bitmasks), takes FMBitmask.find_least_zero_pos over the nreplication copies, averages into avg_bitpos, and accumulates Math.pow(2, ...)-based neighborhood estimates (sum_nh) along with converged_count/changed_count.]
PEGASUS/classes/pegasus/PagerankNaive$RedStage4.class
    [compiled class — binary payload omitted. Recoverable interface: reducer PagerankNaive.RedStage4; sums IntWritable counts per key.]
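The MapStage3 arithmetic is the classic Flajolet-Martin estimate used by HADI: average the position of the least-significant zero bit across the bitmask replicas, then take 2 to that power over a correction factor. A sketch; the 0.77351 constant is the textbook FM value, not read from the payload:

public class FmEstimateSketch {
  // Position of the least-significant zero bit (the FM "rank" of a bitmask).
  static int findLeastZeroPos(long bitmask) {
    int pos = 0;
    while (pos < 64 && (bitmask & (1L << pos)) != 0) pos++;
    return pos;
  }

  // Neighborhood-size estimate: 2^(average rank) / 0.77351.
  static double estimate(long[] bitmasks) {
    double sum = 0.0;
    for (long bm : bitmasks) sum += findLeastZeroPos(bm);
    double avgBitpos = sum / bitmasks.length;
    return Math.pow(2.0, avgBitpos) / 0.77351;
  }
}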
PEGASUS/classes/pegasus/L1norm.class
    [compiled class — binary payload omitted. Recoverable interface: driver pegasus.L1norm extends Configured implements Tool; int nreducers; method configL1norm(Path in, Path out); inner class MapStage1, with PegasusUtils.RedSumDouble as both reducer and combiner; writes to "l1norm_output". Console strings: "[PEGASUS] Computing L1norm. in_path=", "[PEGASUS] L1norm computed. Output is saved in HDFS".]
PEGASUS/classes/pegasus/PegasusUtils$RedAvgDouble.class
    [compiled class — binary payload omitted. Recoverable interface: reducer PegasusUtils.RedAvgDouble; accumulates the sum and count of DoubleWritable values per key and emits sum/count.]
PEGASUS/classes/pegasus/PagerankBlock$RedStage2.class
    [compiled class — binary payload omitted. Recoverable interface: reducer PagerankBlock.RedStage2 with int block_width and doubles mixing_c, random_coeff, converge_threshold, number_nodes (configure() prints "RedStage2 : block_width= , converge_threshold="); sums the partial block products (GIMV.parseVectorVal), applies the damped PageRank update per element, and increments the Reporter counter PrCounters.CONVERGE_CHECK for every element whose change exceeds converge_threshold.]
PEGASUS/classes/pegasus/EdgeType.class
    [compiled class — binary payload omitted. Recoverable interface: enum pegasus.EdgeType { Real, Binary } (compiled from GIMV.java).]
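Per element, the stage-2 update is newRank = random_coeff + mixing_c * sum, with convergence judged by comparing |newRank - oldRank| against converge_threshold. A sketch; random_coeff = (1 - mixing_c) / number_nodes is an assumption consistent with the fields, not read from the payload:

public class PagerankUpdateSketch {
  // Apply newRank = randomCoeff + mixingC * partialSum to one block; return how
  // many elements moved more than the threshold (reported via CONVERGE_CHECK).
  static int apply(double[] partialSums, double[] oldRank, double[] newRank,
                   double mixingC, double randomCoeff, double threshold) {
    int unconverged = 0;
    for (int i = 0; i < partialSums.length; i++) {
      newRank[i] = randomCoeff + mixingC * partialSums[i];
      if (Math.abs(newRank[i] - oldRank[i]) > threshold) unconverged++;
    }
    return unconverged;
  }
}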
PEGASUS/classes/pegasus/ConCmpt$RedStage1.class
    [compiled class — binary payload omitted. Recoverable interface: reducer ConCmpt.RedStage1 with int number_nodes (configure() prints "RedStage1 : configure is called. number_nodes ="); collects each node's neighbor ids into a HashSet, keeps the node's own component string, and emits "msi"/"moi"-prefixed records for stage 2 to minimize over.]
PEGASUS/classes/pegasus/RWRNaive$MapStage2.class
    [compiled class — binary payload omitted. Recoverable interface: mapper RWRNaive.MapStage2; tab-splits each line and emits IntWritable(first column) -> Text(second column).]
PEGASUS/classes/pegasus/ConCmpt.class
    [compiled class — binary payload omitted. Recoverable interface: main driver pegasus.ConCmpt extends Configured implements Tool; constant MAX_ITERATIONS; fields int[] changed_nodes, unchanged_nodes; int iter_counter, number_nodes, nreducers, cur_iter, start_from_newbm, make_symmetric; Path edge_path, curbm_path, tempbm_path, nextbm_path, output_path, summaryout_path; String local_output_path; methods main(String[]), printUsage(), run(String[]), gen_component_vector_file(int, Path), gen_one_file(int, int, int, Path), readIterationOutput(String) returning pegasus.ResultInfo (parsed from ".../part-00000"), configStage1()..configStage4(); inner classes MapStage1..MapStage4, RedStage1..RedStage4, CombinerStage2. Console strings: "concmpt <# of nodes> <# of tasks>", "Starting from cur_iter =", "[PEGASUS] Computing connected component. Edge path = , Newbm = , Reducers =", "Hop : changed = , unchanged =", "All the component ids converged. Finishing...", "Summarizing connected components information...", "[PEGASUS] Connected component computed.", "[PEGASUS] Total Iteration =", "[PEGASUS] Connected component information is saved in the HDFS concmpt_curbm as 'node_id msf component_id' format", "[PEGASUS] Connected component distribution is saved in the HDFS concmpt_summaryout as 'component_id number_of_nodes' format".]
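The run() loop above alternates the GIM-V stages, copies the iteration summary to a local file, and stops once no node changed its component id. A sketch of that control flow; the stage wiring and ResultInfo parsing are stubbed, and the path rotation is inferred from the curbm/nextbm fields:

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class ConCmptLoopSketch {
  static class ResultInfo { int changed; int unchanged; }

  // Placeholder: the real method parses "<local>/part-00000" for the two counts.
  static ResultInfo readIterationOutput(String localPath) { return new ResultInfo(); }

  static void iterate(JobConf[] stages, FileSystem fs, Path output,
                      Path curbm, Path nextbm, String localOut, int maxIter)
      throws Exception {
    for (int i = 0; i < maxIter; i++) {
      for (JobConf stage : stages) JobClient.runJob(stage);
      fs.copyToLocalFile(output, new Path(localOut));
      ResultInfo ri = readIterationOutput(localOut);
      System.out.println("Hop " + i + " : changed = " + ri.changed
          + ", unchanged = " + ri.unchanged);
      if (ri.changed == 0) {
        System.out.println("All the component ids converged. Finishing...");
        break;
      }
      fs.delete(curbm);          // rotate component vectors for the next hop
      fs.rename(nextbm, curbm);
    }
  }
}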
PEGASUS/classes/pegasus/PegasusUtils$RangePartitionS1.class
    [compiled class — binary payload omitted. Recoverable interface: Partitioner<IntWritable, Text> PegasusUtils.RangePartitionS1 with int number_nodes (configure() prints "RangePartition configure(): number_nodes ="); getPartition() maps a node id proportionally into [0, numReduceTasks), clamping the last range.]
PEGASUS/classes/pegasus/PegasusUtils$RedSumDoubleLongKey.class
    [compiled class — binary payload omitted. Recoverable interface: reducer PegasusUtils.RedSumDoubleLongKey over LongWritable keys; sums DoubleWritable values per key.]
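Range (rather than hash) partitioning keeps consecutive node ids on the same reducer, which is what the blocked GIM-V stages need to reassemble contiguous slices. A sketch consistent with the fields above; the exact rounding and clamping are assumed:

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Partitioner;

// Sketch: send node id k to reducer floor(k * numReduceTasks / numberNodes).
public class RangePartitionSketch implements Partitioner<IntWritable, Text> {
  private int numberNodes = 1;

  public void configure(JobConf job) {
    numberNodes = Integer.parseInt(job.get("number_nodes"));
  }

  public int getPartition(IntWritable key, Text value, int numReduceTasks) {
    int result = (int) ((long) key.get() * numReduceTasks / numberNodes);
    return result >= numReduceTasks ? numReduceTasks - 1 : result;
  }
}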
PEGASUS/classes/pegasus/PegasusUtils$RedIdentity.class
    [compiled class — binary payload omitted. Recoverable interface: reducer PegasusUtils.RedIdentity; re-emits every Text value for its key unchanged.]
PEGASUS/classes/pegasus/HadiIVGen$MapStage1.class
    [compiled class — binary payload omitted. Recoverable interface: mapper HadiIVGen.MapStage1; tab-splits each line, skips "#" comments, and emits IntWritable(node) -> Text assembled with a StringBuilder (initial bitstring construction).]
mopq()VCodeLineNumberTableLocalVariableTablethisCombinerStage2 InnerClasses Lpegasus/ConCmpt$CombinerStage2;reduce(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)VexLjava/lang/Exception;cur_value_textLorg/apache/hadoop/io/Text; cur_ci_stringLjava/lang/String; cur_nodeidIkey"Lorg/apache/hadoop/io/IntWritable;valuesLjava/util/Iterator;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;out_valcur_min_nodeidLocalVariableTypeTable1Ljava/util/Iterator;iLorg/apache/hadoop/mapred/OutputCollector; StackMapTablerolstuW] Exceptionsv Signature(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFile ConCmpt.java mois wx yzorg/apache/hadoop/io/Text {|r }~ java/lang/Exception java/lang/StringBuilderException! cur_ci_string=[ ] t {~  org/apache/hadoop/io/IntWritable %&pegasus/ConCmpt$CombinerStage2&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/lang/Stringjava/util/Iterator(org/apache/hadoop/mapred/OutputCollector!org/apache/hadoop/mapred/Reporterjava/io/IOExceptionhasNext()Znext()Ljava/lang/Object;toString()Ljava/lang/String; substring(I)Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)Ijava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;java/io/PrintStreamprintln(Ljava/lang/String;)VcharAt(I)C(Lorg/apache/hadoop/io/Text;)Vcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/ConCmpt!/* !$%& :6,,::6 6 $: Y  s-+Y  6  6}, Y :-+Y%03 Z"%035T_orx p 5'( o)*"h+,%e-. !$/01234567,8.91:3;<5=+ >?@AB=C=D  .EFGHA%I_ *+,- 4 !$ JK L2 M4 N6EFGOPQ# n" PEGASUS/classes/pegasus/Hadi$RedStage5.class0000644000000000000000000000436311443145617017447 0ustar rootroot2N 3 45 467 8 9 :; <>?@()VCodeLineNumberTableLocalVariableTablethis RedStage5 InnerClassesLpegasus/Hadi$RedStage5;reduce(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)V cur_countIkey"Lorg/apache/hadoop/io/IntWritable;valuesLjava/util/Iterator;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;sumLocalVariableTypeTable8Ljava/util/Iterator;pLorg/apache/hadoop/mapred/OutputCollector; StackMapTable ExceptionsA Signature(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFile Hadi.java B CD EF org/apache/hadoop/io/IntWritable GH IJ KL Mpegasus/Hadi$RedStage5&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/io/IOExceptionjava/util/IteratorhasNext()Znext()Ljava/lang/Object;get()I(I)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)V pegasus/Hadi!   /* 56,,6`6-+Y !$4H55555 2!"5#5$% &'()A*_ *+,-4  +, - . 
/ &'(012  = PEGASUS/classes/pegasus/ConCmptBlock$MapStage5.class0000644000000000000000000000664611443145617021131 0ustar rootroot2 Z[ Z \ ] ^# _` ab c def Zg h i j kl jm no npq r stuv wyz{ out_key_int"Lorg/apache/hadoop/io/IntWritable; out_count_int block_widthI()VCodeLineNumberTableLocalVariableTablethis MapStage5 InnerClasses Lpegasus/ConCmptBlock$MapStage5; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;map(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)V cur_minnodeikey#Lorg/apache/hadoop/io/LongWritable;valueLorg/apache/hadoop/io/Text;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter; line_textLjava/lang/String;line[Ljava/lang/String;elemsLocalVariableTypeTablepLorg/apache/hadoop/mapred/OutputCollector; StackMapTableyuv|}~A Exceptions Signature(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vt(Ljava/lang/Object;Ljava/lang/Object;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Mapper; SourceFileConCmptBlock.java %& org/apache/hadoop/io/IntWritable ! % "!  #$ java/lang/StringBuilder.MapStage5 : configure is called. block_width=   ~  | !org/apache/hadoop/io/LongWritableorg/apache/hadoop/io/Text 23pegasus/ConCmptBlock$MapStage5&org/apache/hadoop/mapred/MapReduceBaseorg/apache/hadoop/mapred/Mapper(org/apache/hadoop/mapred/OutputCollector!org/apache/hadoop/mapred/Reporterjava/lang/Stringjava/io/IOException(I)V org/apache/hadoop/mapred/JobConfget&(Ljava/lang/String;)Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)Ijava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(I)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/io/PrintStreamprintln(Ljava/lang/String;)Vsplit'(Ljava/lang/String;)[Ljava/lang/String; substring(I)Ljava/lang/String;setcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/ConCmptBlock! !"!#$%&'N**Y*Y(GIJ) *-./'f**+ Y * (N P)Q)**-*0123'4 S,::2:6,`2 6 * -**ұ(& UVWY)Z5\>]LYR_)f 54$ !15$S*-S67S89S:;S<=M>?D@A5BAC S:DE%! FGHIJKLL0MNOPA2Q'b*+,-(G)4*-RSTSU;V=MNOWXY, x+ PEGASUS/classes/pegasus/PagerankNaive$MapStage4.class0000644000000000000000000000700211443145620021302 0ustar rootroot2 "[\ [ !] 
!^ !_ !` !a& bc de( fgh [i j kl m no mp qrs qt qu v wxyz !{}~ from_node_int"Lorg/apache/hadoop/io/IntWritable;min_prDmax_prgap_pr hist_widthI()VCodeLineNumberTableLocalVariableTablethis MapStage4 InnerClasses!Lpegasus/PagerankNaive$MapStage4; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;map(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vkey#Lorg/apache/hadoop/io/LongWritable;valueLorg/apache/hadoop/io/Text;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter; line_textLjava/lang/String;line[Ljava/lang/String;pagerank distr_indexLocalVariableTypeTablepLorg/apache/hadoop/mapred/OutputCollector; StackMapTableF Exceptions Signature(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vt(Ljava/lang/Object;Ljava/lang/Object;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Mapper; SourceFilePagerankNaive.java ,- org/apache/hadoop/io/IntWritable $% &' (' )' *+   java/lang/StringBuilderMapStage4: min_pr =  , max_pr =  #  , !org/apache/hadoop/io/LongWritableorg/apache/hadoop/io/Text 9:pegasus/PagerankNaive$MapStage4&org/apache/hadoop/mapred/MapReduceBaseorg/apache/hadoop/mapred/Mapperjava/lang/Stringjava/io/IOException org/apache/hadoop/mapred/JobConfget&(Ljava/lang/String;)Ljava/lang/String;java/lang/Double parseDouble(Ljava/lang/String;)Djava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(D)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/io/PrintStreamprintln(Ljava/lang/String;)V startsWith(Ljava/lang/String;)Zsplit'(Ljava/lang/String;)[Ljava/lang/String; substring(I)Ljava/lang/String;(I)V(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/PagerankNaive!!"#$%&'(')'*+,-.d&**Y****/    0 &1456.P*+ *+ ***g Y**/ 'O0P14P789:.- g,::2 9**gk*o`6  *` *6 -Y Y/* ' >!I"O#f$0\ g14g;<g=>g?@gABaCDMEF'@G'>)H+ I g?JKL=MNOPQA9R.b*+,- /0414STUTV@WBNOPXYZ3 !|2 PEGASUS/classes/pegasus/RWRBlock$MapStage25.class0000644000000000000000000000657611443145620020316 0ustar rootroot2 S! TU VW X YZ[ S\ ] ^ _ `a _b cd ce cfg hij klm n o pqr suvw block_widthI()VCodeLineNumberTableLocalVariableTablethis MapStage25 InnerClassesLpegasus/RWRBlock$MapStage25; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;map(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Velem_row rwr_scoreDkey#Lorg/apache/hadoop/io/LongWritable;valueLorg/apache/hadoop/io/Text;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;line[Ljava/lang/String;tokensiblock_idLocalVariableTypeTableiLorg/apache/hadoop/mapred/OutputCollector; StackMapTable> Exceptionsx Signature(Lorg/apache/hadoop/io/LongWritable;Lorg/apache/hadoop/io/Text;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vt(Ljava/lang/Object;Ljava/lang/Object;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Mapper; SourceFile RWRBlock.java #$y z{| }~ !" 
java/lang/StringBuilderMapStage25: block_width =       org/apache/hadoop/io/IntWritable #org/apache/hadoop/io/Textv # !org/apache/hadoop/io/LongWritable 01pegasus/RWRBlock$MapStage25&org/apache/hadoop/mapred/MapReduceBaseorg/apache/hadoop/mapred/Mapperjava/io/IOException org/apache/hadoop/mapred/JobConfget&(Ljava/lang/String;)Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)Ijava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(I)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/io/PrintStreamprintln(Ljava/lang/String;)Vjava/lang/Stringsplit'(Ljava/lang/String;)[Ljava/lang/String;charAt(I)C substring(I)Ljava/lang/String;java/lang/Double parseDouble(Ljava/lang/String;)D(I)V(D)Ljava/lang/StringBuilder;(org/apache/hadoop/mapred/OutputCollectorcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/RWRBlock! !"#$%/*&' (+,-%f**+Y * & )'*(+*./01% ,::2s2:2:266Q26 `29 -Y*h `YY   &2 -8ALVb'p V>2" b234 (+56789:;< =>?>DW@"AZA"B 9CD-EE UFGHIA0J%b*+,-&'4(+KLMLN:O<FGHPQR* t) PEGASUS/classes/pegasus/BitShuffleCoder.class0000644000000000000000000000717411443145616020044 0ustar rootroot2u O PQ PR ST UVW O X Y Z[@ \] S^ P_`ab()VCodeLineNumberTableLocalVariableTablethisLpegasus/BitShuffleCoder;decode_bitmasks(Ljava/lang/String;I)[IjI cur_valuecur_bytestrLjava/lang/String;Kiresult[I fill_valuecumulated_value byte_buffer byte_bufpos str_bytes[B StackMapTablec*0 fill_result([IIII)Vstart_istart_jcountencode_bitmasks([II)Ljava/lang/String;cur_bitBcur_maskbm_arrayprev_bit cur_count([JI)Ljava/lang/String;J[JD encode_value(I)Ljava/lang/String; added_valuenumber one_masksnbytes temp_result SourceFile Hadi.java c de fgh ij 56java/lang/StringBuilder kl FG mn80o pq rG st0pegasus/BitShuffleCoderjava/lang/Objectjava/lang/StringgetBytes()[B substring(II)Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;I)Iappend-(Ljava/lang/String;)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/lang/Mathceil(D)D toHexStringlength()I /**   >  :66=O :6 *: = *`6  ~\  ~O6> . ddhx`6 `6d66   ~Oxf034 6769":%<+>4?CALBYD\EdFwE}JKMNOQ>Uz ^G!"\I#"Cu$" %&'" (")*+" ,""-*%." +/0 1 23 23340 2334 2334  2334 56M Tl6p666 > 66!*\.dxO߄:Z[ \ ^_`&b)d/e=fFgGdM^Skf #!")*!"T)*T'"T#"T,"C("O7" J8" G9"1; 3 3 3 :; M66> dx66r*.~ 66(Y,  M66. !Y,  M66wY,  M,fqst xyz"{,|2~5;@VZ]`gmzxf /<=5\<={!"~>"?*'")& ("@= A"1h 32 32 32  3232 :B  M66> dx76t*/ 66(Y,  M66. 
!Y,  M66tY,  M,f #/58>CY]`cjpf 2<=8\<=}!">C?D'")& ("@= A"1h E2 E2 E2  E2E2 FG > YOY?OY OYOYO:<x=~ `o6<2.~>.~hz6hx>:pY  :R (/39<BQY]kxR3>"x H"I"+(" )"(J*QaK"$L&1: #333&2MNPEGASUS/classes/pegasus/ConCmptIVGen$RedStage1.class0000644000000000000000000000656111443145617021034 0ustar rootroot2 U V WX YZ [\] U^ _ ` a bc de dfg ah ijk lm n op qstu number_nodesI()VCodeLineNumberTableLocalVariableTablethis RedStage1 InnerClasses Lpegasus/ConCmptIVGen$RedStage1; configure%(Lorg/apache/hadoop/mapred/JobConf;)Vjob"Lorg/apache/hadoop/mapred/JobConf;reduce(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vicur_textLorg/apache/hadoop/io/Text;line[Ljava/lang/String; start_nodeend_nodekey"Lorg/apache/hadoop/io/IntWritable;valuesLjava/util/Iterator;output*Lorg/apache/hadoop/mapred/OutputCollector;reporter#Lorg/apache/hadoop/mapred/Reporter;LocalVariableTypeTable1Ljava/util/Iterator;iLorg/apache/hadoop/mapred/OutputCollector; StackMapTableskvwxg2 Exceptionsy Signature(Lorg/apache/hadoop/io/IntWritable;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vv(Ljava/lang/Object;Ljava/util/Iterator;Lorg/apache/hadoop/mapred/OutputCollector;Lorg/apache/hadoop/mapred/Reporter;)Vx0Ljava/lang/Object;x1x2x3Lorg/apache/hadoop/mapred/MapReduceBase;Lorg/apache/hadoop/mapred/Reducer; SourceFileConCmptIVGen.java  z {|} ~ java/lang/StringBuilderRedStage1: number_nodes =  v org/apache/hadoop/io/Text   org/apache/hadoop/io/IntWritable v w ,-pegasus/ConCmptIVGen$RedStage1&org/apache/hadoop/mapred/MapReduceBase org/apache/hadoop/mapred/Reducerjava/util/Iterator(org/apache/hadoop/mapred/OutputCollector!org/apache/hadoop/mapred/Reporterjava/io/IOException org/apache/hadoop/mapred/JobConfget&(Ljava/lang/String;)Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)Ijava/lang/SystemoutLjava/io/PrintStream;append-(Ljava/lang/String;)Ljava/lang/StringBuilder;(I)Ljava/lang/StringBuilder;toString()Ljava/lang/String;java/io/PrintStreamprintln(Ljava/lang/String;)VhasNext()Znext()Ljava/lang/Object;java/lang/Stringsplit'(Ljava/lang/String;)[Ljava/lang/String;(I)Vcollect'(Ljava/lang/Object;Ljava/lang/Object;)Vpegasus/ConCmptIVGen! !8 **" 8:#  $'()!f**+Y * "= ?)@#*$'**+,-!c q,j,::26266  3-Y YY   ̧"* F GH J)K2M=NgMmPpQ#f 67. Y/0 M12)G32>4q$'q56q78q9:q;<=q7>q9?@(5 ABCDEFG6HIJKA,L!_ *+,-"8#4 $' MN O8 P: Q<HIJRST& r% PEGASUS/classes/pegasus/HadiIVGen.class0000644000000000000000000001301511443145617016562 0ustar rootroot2! T             0 2 2  2   @\ @`_ @ @ @ @ @ @ RedStage1 InnerClasses MapStage1 input_pathLorg/apache/hadoop/fs/Path; output_path number_nodesInumber_reducersencode_bitmask nreplicationfs!Lorg/apache/hadoop/fs/FileSystem;()VCodeLineNumberTableLocalVariableTablethisLpegasus/HadiIVGen;main([Ljava/lang/String;)Vargs[Ljava/lang/String;result Exceptions printUsage()Irun([Ljava/lang/String;)Iinput_path_nameLjava/lang/String; StackMapTable gen_cmd_file!(IIILorg/apache/hadoop/fs/Path;)Vend_node start_node num_nodes num_reducersi file_namefileLjava/io/FileWriter;outLjava/io/BufferedWriter;step configStage1$()Lorg/apache/hadoop/mapred/JobConf;conf"Lorg/apache/hadoop/mapred/JobConf; SourceFileHadiIVGen.java cd YZ [Z \] ^] _] `]$org/apache/hadoop/conf/Configurationpegasus/HadiIVGen s RHadiIVGen <# of nodes> <# of reducers>  qrorg/apache/hadoop/fs/Path cjava/lang/StringBuilder hadi_ivcmd  r  enc = -----===[PEGASUS: A Peta-Scale Graph Mining System]===----- <[PEGASUS] Generating initial bistring vector. 
Output path = , number of nodes = , number of reducers =, nreplication=, encode_bitmask =  yz     ab   > [PEGASUS] Initial bistring vector for HADI generated in HDFS  hadi_iv.tempjava/io/FileWriterjava/io/BufferedWriter c '# component vector file from HadiIVGen  # number of nodes in graph = )creating initial vector generation cmd...   ddone..//  org/apache/hadoop/mapred/JobConf c HadiIVGen_pass1 pegasus/HadiIVGen$MapStage1 pegasus/HadiIVGen$RedStage1     org/apache/hadoop/io/IntWritable org/apache/hadoop/io/Text  !org/apache/hadoop/conf/Configuredorg/apache/hadoop/util/Tooljava/lang/Exceptionjava/lang/Stringjava/io/IOException!org/apache/hadoop/util/ToolRunnerY(Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/util/Tool;[Ljava/lang/String;)Ijava/lang/Systemexit(I)VLjava/io/PrintStream;java/io/PrintStreamprintln(Ljava/lang/String;)VprintGenericCommandUsage(Ljava/io/PrintStream;)Vappend-(Ljava/lang/String;)Ljava/lang/StringBuilder;length substring(I)Ljava/lang/String;toString()Ljava/lang/String;java/lang/IntegerparseInt(Ljava/lang/String;)I compareTo(I)Ljava/lang/StringBuilder;"org/apache/hadoop/mapred/JobClientrunJobI(Lorg/apache/hadoop/mapred/JobConf;)Lorg/apache/hadoop/mapred/RunningJob;getConf(()Lorg/apache/hadoop/conf/Configuration;org/apache/hadoop/fs/FileSystemgetI(Lorg/apache/hadoop/conf/Configuration;)Lorg/apache/hadoop/fs/FileSystem;delete(Lorg/apache/hadoop/fs/Path;)Z(Ljava/io/Writer;)VwriteprintclosecopyFromLocalFile:(ZLorg/apache/hadoop/fs/Path;Lorg/apache/hadoop/fs/Path;)V:(Lorg/apache/hadoop/conf/Configuration;Ljava/lang/Class;)Vset'(Ljava/lang/String;Ljava/lang/String;)V setJobNamesetMapperClass(Ljava/lang/Class;)VsetReducerClass(org/apache/hadoop/mapred/FileInputFormat setInputPathsA(Lorg/apache/hadoop/mapred/JobConf;[Lorg/apache/hadoop/fs/Path;)V)org/apache/hadoop/mapred/FileOutputFormat setOutputPath@(Lorg/apache/hadoop/mapred/JobConf;Lorg/apache/hadoop/fs/Path;)VsetNumReduceTaskssetOutputKeyClasssetOutputValueClass! TUYZ[Z\]^]_]`]abcdef$******* f!t uvwxyg $hi jkeTY Y * < fglmn]op qre0fste(+*Y+2Y+2+2dM*Y,*+2*+2*+2+2 **Y +2!*"#*"$*"%*"&*****'*()W***+,*,*-WY.+2&fN 9EOYcnv{&g (hi(lm9uvw  kxopyze) /:0Y1:2Y3:45Y6*"&578l6 6X h6 d `hd6 d6 Y"9 "9 "&5:;**+:  YY<YY=>?fN!AINW^ftygq{] ^N|] y3{] hi}]~]`]YZQ]vN] Eab w(Q x"2oe @Y** AL+BYC*"D+EYC*"D+FYC*"D+GH+IJ+KL+Y*SM+*N+*O+PQ+RS+f6 +GcipwghiopWK V I X PEGASUS/classes/pegasus/VectorElem.class0000644000000000000000000000135411443145617017074 0ustar rootroot2(   !" 
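Since everything under classes/ ships only as bytecode, a quick sanity check on a downloaded copy is to list the archive before unpacking it. A minimal sketch — the tarball filename pegasus-2.0.tar.gz is an assumption for illustration, not taken from this listing:

# List the archive contents without extracting (filename is assumed).
tar -tzf pegasus-2.0.tar.gz | head

# Extract, then confirm the compiled job classes are present.
tar -xzf pegasus-2.0.tar.gz
ls PEGASUS/classes/pegasus/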
[binary data omitted — compiled classes continue:
PEGASUS/classes/pegasus/VectorElem.class
PEGASUS/classes/pegasus/RWRNaive$RedStage1.class
PEGASUS/classes/pegasus/RWRNaive$RedStage2.class
PEGASUS/classes/pegasus/MinMaxInfo.class
PEGASUS/classes/pegasus/JoinTablePegasus$MapPass1.class
PEGASUS/classes/pegasus/SaxpyTextoutput$RedStage1.class
PEGASUS/classes/pegasus/ConCmpt$MapStage3.class
PEGASUS/classes/pegasus/matvec/ (directory)
PEGASUS/classes/pegasus/matvec/MatvecNaive$MapPass2.class
PEGASUS/classes/pegasus/matvec/MatvecNaive$RedPass1.class
PEGASUS/classes/pegasus/matvec/MatvecNaive$RedPass2.class
PEGASUS/classes/pegasus/matvec/MatvecPrep$MvPrepComparator.class]
[binary data omitted — the remaining compiled classes:
PEGASUS/classes/pegasus/matvec/MatvecPrep$RedStage1.class
PEGASUS/classes/pegasus/matvec/MatvecPrep.class
PEGASUS/classes/pegasus/matvec/MatvecNaive.class
PEGASUS/classes/pegasus/matvec/MatvecNaive$MapPass1.class
PEGASUS/classes/pegasus/matvec/MatvecPrep$MapStage1.class]

PEGASUS/do_ccmptblk_catstar.sh:
which hadoop > /dev/null
status=$?
if test $status -ne 0 ; then
    echo ""
    echo "Hadoop is not installed in the system."
    echo "Please install Hadoop and make sure the hadoop binary is accessible."
    exit 127
fi
hadoop dfs -rmr cc_edge
hadoop dfs -mkdir cc_edge
hadoop dfs -put catepillar_star.edge cc_edge
./run_ccmptblk.sh 16 3 cc_edge 5

PEGASUS/do_ccmpt_catstar.sh:
which hadoop > /dev/null
status=$?
if test $status -ne 0 ; then
    echo ""
    echo "Hadoop is not installed in the system."
    echo "Please install Hadoop and make sure the hadoop binary is accessible."
    exit 127
fi
hadoop dfs -rmr cc_edge
hadoop dfs -mkdir cc_edge
hadoop dfs -put catepillar_star.edge cc_edge
./run_ccmpt.sh 16 3 cc_edge

PEGASUS/do_dd_catstar.sh:
which hadoop > /dev/null
status=$?
if test $status -ne 0 ; then
    echo ""
    echo "Hadoop is not installed in the system."
    echo "Please install Hadoop and make sure the hadoop binary is accessible."
    exit 127
fi
hadoop dfs -rmr dd_edge
hadoop dfs -mkdir dd_edge
hadoop dfs -put catepillar_star.edge dd_edge
./run_dd.sh inout 16 dd_edge

PEGASUS/do_hadiblk_catstar.sh:
which hadoop > /dev/null
status=$?
if test $status -ne 0 ; then
    echo ""
    echo "Hadoop is not installed in the system."
    echo "Please install Hadoop and make sure the hadoop binary is accessible."
    exit 127
fi
hadoop dfs -rmr hadi_edge
hadoop dfs -mkdir hadi_edge
hadoop dfs -put catepillar_star.edge hadi_edge
./run_hadiblk.sh 16 3 hadi_edge makesym 2 noenc

PEGASUS/do_hadi_catstar.sh:
which hadoop > /dev/null
status=$?
if test $status -ne 0 ; then
    echo ""
    echo "Hadoop is not installed in the system."
    echo "Please install Hadoop and make sure the hadoop binary is accessible."
    exit 127
fi
hadoop dfs -rmr hadi_edge
hadoop dfs -mkdir hadi_edge
hadoop dfs -put catepillar_star.edge hadi_edge
./run_hadi.sh 16 1 hadi_edge makesym enc

PEGASUS/do_prblk_catstar.sh:
which hadoop > /dev/null
status=$?
if test $status -ne 0 ; then
    echo ""
    echo "Hadoop is not installed in the system."
    echo "Please install Hadoop and make sure the hadoop binary is accessible."
    exit 127
fi
hadoop dfs -rmr pr_edge
hadoop dfs -mkdir pr_edge
hadoop dfs -put catepillar_star.edge pr_edge
./run_prblk.sh 16 3 pr_edge makesym 2

PEGASUS/do_pr_catstar.sh:
which hadoop > /dev/null
status=$?
if test $status -ne 0 ; then
    echo ""
    echo "Hadoop is not installed in the system."
    echo "Please install Hadoop and make sure the hadoop binary is accessible."
    exit 127
fi
hadoop dfs -rmr pr_edge
hadoop dfs -mkdir pr_edge
hadoop dfs -put catepillar_star.edge pr_edge
./run_pr.sh 16 5 pr_edge nosym

PEGASUS/do_rwrblk_catstar.sh:
which hadoop > /dev/null
status=$?
if test $status -ne 0 ; then
    echo ""
    echo "Hadoop is not installed in the system."
    echo "Please install Hadoop and make sure the hadoop binary is accessible."
    exit 127
fi
hadoop dfs -rmr rwr_edge
hadoop dfs -mkdir rwr_edge
hadoop dfs -put catepillar_star.edge rwr_edge
hadoop dfs -rmr rwr_query
hadoop dfs -mkdir rwr_query
hadoop dfs -put catepillar_star_rwr.query rwr_query
./run_rwrblk.sh rwr_edge rwr_query 16 5 makesym 16 0.85

PEGASUS/do_rwr_catstar.sh:
which hadoop > /dev/null
status=$?
if test $status -ne 0 ; then
    echo ""
    echo "Hadoop is not installed in the system."
    echo "Please install Hadoop and make sure the hadoop binary is accessible."
    exit 127
fi
hadoop dfs -rmr rwr_edge
hadoop dfs -mkdir rwr_edge
hadoop dfs -put catepillar_star.edge rwr_edge
hadoop dfs -rmr rwr_query
hadoop dfs -mkdir rwr_query
hadoop dfs -put catepillar_star_rwr.query rwr_query
./run_rwr.sh rwr_edge rwr_query 16 5 makesym new 0.85

PEGASUS/Makefile:
all: demo

demo: demo_hadi

demo_deg:
	./do_dd_catstar.sh

demo_pr:
	./do_pr_catstar.sh

demo_prblk:
	./do_prblk_catstar.sh

demo_rwr:
	./do_rwr_catstar.sh

demo_rwrblk:
	./do_rwrblk_catstar.sh

demo_hadi:
	./do_hadi_catstar.sh

demo_hadiblk:
	./do_hadiblk_catstar.sh

demo_ccmpt:
	./do_ccmpt_catstar.sh

demo_ccmptblk:
	./do_ccmptblk_catstar.sh

PEGASUS/pegasus-2.0.jar:
[binary data omitted — prebuilt pegasus-2.0.jar, a zip payload containing META-INF/MANIFEST.MF and the compiled pegasus/ and pegasus/matvec/ class entries listed above]
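With the Makefile above, each demo can be driven through make instead of calling the do_* scripts directly; the default target chains all -> demo -> demo_hadi. A short usage sketch (assumes a working Hadoop install, per the guard in each script):

make demo_pr        # PageRank on the caterpillar-star sample graph
make demo_ccmptblk  # block-based connected components
make                # default target: runs the HADI demo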
(#/a[c׿33 䲔쇼dV,wڦ#-,٣Eǖw1e|&6f>C*ɓ<Իm[4Z* &KEѓƴ'Fbֳ%y5\gU'lXhdu+T:J;UƉ:,5i6JVJKS] wP nlodd+ EMvO wĘr3oٱVtr^pa|OYx+F1|Qf&;nL_aOF]q~o oƅu\Ranx@^aы+Lҥ:ad';L緞m ѣ~G4?ѰQ?3\`⚻dfStyV 19hW'_ӘK<#iYú`5P84uUFC;@wC, F I:?N.+q /"N}8a;nYU0na)a|zTF?vrOn./p;BkanA>wHI"(-b= ~wS.xA"^R]uLCTAr*k֊CUpRPZKhS#-tAѷe߿-D +Hj^}7e}3fokOE\ek;Wo03w4ugcqt]wE q#*i-PK E,=ڦr $pegasus/ConCmptBlock$RedStage3.classVKsGz, Lb1'AVx$c 2^Oɋ׻[p_s.*37rLzF0 Att}/Ã4t|ۇ$4ӈc&˘dN.̧чiΐ+eF%wU0XX6VWl T\ܷ亩kV[y8/<mx΃(0JSy5O /V#=)T\npkX㫮k,Xy7mCm7G(ڶ0;[V䰨)m fsnBr׌܎Ȁ^1PȔ2d}Wd>pjm0 ]jnC^rk<}"Hf\ͽ&-Sx:ԒUwxgO~v]Nfa,.Ñ/" J*ʰ_5 [1hkڗ~-0%D'~5da-c?`Va r#0slNɩ'iͅ(m2HJlr fd=8M~ <!5t|b`8b8bjdEfc5M̍rc}@g{JO6 [sx=#M9F&cHRu8)lC+PK E,=UR6pegasus/ConCmptBlock.classX |guٕf3%[>eVXöhw$-ދ=+!s&v hK!`I[8 G9C˝4%-%-$073Z*~}7O? l *qNȈBF|Jȧ\򤐧<-3B>+s*>@9Ϊg&_dET|=ojVw4|7gV|qVÿ}?C#!&Dz ;C~"B3x?_h_Y~^(N5*A_'i NjT$NiʣRJ͢S*U V 5R9*hJUZ 5J5F%CE|fZJ:HgDJ7LT?)X`|Jex7 t t WsY[Ӂ֮M]; J2V/^,%.֣Y+Ny[+2hadzL!~Ȍ$LH(=jɴqW(ZԸeǥd.e]YK $cwb`DZFJc9 '}l/O+lewwFcVpC6ux%%\GajS8+G8;m_\Vpp@tp,i֬07Sq",B 5EUmS8F<Й̥Ö8s0tuEqBǝB KBr7 9c:*>!'1vdDr$>W.mJd^9RP)JjNzpPؔ-Ʀd>Y*O %FriL+)LHy>7c.Wט<~'Nh=iy=;Z:;&c5k;D-%m6rbpR k۫9\4KelÃ_ـOЊSdRNaƣ衕kX9ݣcQ5 ՜{ ,Sjajw1vnsb1h}7[iKVUug sX,Na1md.wJY NZ/IC.dwL3ecqf"k,E-P3V_}NJYdꠕm[#/3헙^{sYػLh|9_[i  _oŬ,'V/jsLˇ1Ay8J6LqJ3s>v+g=?TwV^קWOw7_| 3ś4^5Q ZF`}ve,I;HQz)g=[vk)skmfb{@Ҧ˨RaŴD(?]W.3s$9fn]/UVdq^Q^*ȷ l~5`[.w7?`l~.']~>V3}KO@K<(H )R,D ) !C_:B>y!>fz{#8D:} a] C83./<~GOQ)]jzS>Cs__8}:N_S5:}ߤ[c6HߡY=+e!PYF?R輲^P'J7T9H?S,zQ9N?WN/ӯz߂bچ|>C|]) >2$%C|/(|AQ\*Nt*> j tQ}Z{$ը<wnѮK -T~\Y Uc:u+T煏$N(CWr w?+AGz,96E hWFG^#@1RP|xRTpﺬN#.Pl:[ lAYR~,E#* lUЮTc29) &<~2p$} SPجLU$7O`$0[YR,Q!,MJ;hSjѥ ٗG<']$2"( .t rGe-Z##-&V0mUЯ4`1u{`z,YS5aLN N\ag(6A obm p7m/T8 qGF)˃Vfn3 *x!~?xB^}6|RKAYM26b;82kyyy<070O3?< ,N9{s#gPK E,=ʎb $pegasus/ConCmptIVGen$MapStage1.classV[SV/"&u&b p^077Nh1ڦ}jI#gȟ{_b2S:t0I/gW{v/ai4\OaN<)|) 75m *J*RqGC4EY嵻_k7***TgPf, 2C9{͍*̪M[3eӷw[Vs*l)8/fpR+{a(Nq[9w@7L\陵unu 5ʮX-!MR_׍6 ?r{/_?O@Ő|bMCR h`R~U4Щ6k'aNèrd kQokk3U(9F>c0ec ڍ:Jb5S4}B5>\'r5gb"$6'v G^Wڶ7byS=ߡonԴk%g-P$i w`07@V,A9{d-ݥ(}˨M!Q<8}vT+<:x[v \oAcA"N ʈ&GCN%pN}q(cFN2?Jbl?B$lAV WoC]*oHmg6zWgD>iߗUo?G: )B):8Ҙ@or˓*k!;p%7Dzr8O J.RDi&}`<9\nM^OA1^!b*χYk9*VchzKd^B&Z.=i|OC} 64 !}6|Y|HT#zPK E,=csmq $pegasus/ConCmptIVGen$RedStage1.classVWG L7HhxQE "XP,ږ.a awl{kW۞>lOj=ǧ>_Ѿfŀz;w}o4|k.p1KxI$·U5 ! 00"z&Ժ 6\Jx9M eY)PuдLM /l.2}sP+5]vJ뮩Š7jROHe=U&#gsdmO8^gam) R:e[#f:RxF>lNrBw\c8yV30f lz434tv.;禌:=+gߺU4RUR .Yj>Oc>ՊvBV:L#OM$%8. Pd`:JLA)A?yydzw}_H.wQ2TG rd|}&[PK E,=,<6 4pegasus/ConCmptIVGen.classW |$3; DPúJVE ( awYٝ"hoS=z^M[QDS7on}X[*#+{{{/?28#Ahx;pt4aDŻtGt^/qn7Y-:fV>v19ߩ.9߭.y^9|D}_BxI%#vkVƣ1 kxBWCInjd$qqְQ> ~R3rgigu||Q.+r@pV>Uq;ߝ @z+WE)'Wts;,07OFV"E D-SlN6!yVNv Cs]J۱ΐwʔrW BK6 (J)[[iƭ&+ʦ(gvoNZb!ڝu39g:!s%cC[z/[Ѵ$17r+>hj ŴKO#nT)9RGOm: /N)tz|_H`<Jɥ9L$R]q k1Qܥ\.@#%S7W~ol686M%bh G!b/B +{Ѹ\q6" 2h%f C1p) ^A G(qh1+a.#ϭwv,w-έ`+UJ͊1x`L/a147枨1L]Nxw\y.kfc@K(Zc"Xሲl H7EPV;4sNyc˱O@)a^2(p<υ%ZnP 1,&p b117׉yՄ viwo`v++Wa'6~_;0k~m]W+!'Au-8 DSI 7bIg8Mtj%,mWtԹfRh ̛^+m]MiӆP0MkSˆrh\1< ۂ5HE{gL]}86{++j= Wc=Bk^h5>PK E,=: pegasus/DegDist$MapPass1.classW[wUI;I:@B B((*B\""I2M3[ />TKyv-7^ߙŔf)ˇ}' 7CX \BxCAi` f~2HY6ҧרzYMT#pmR/$4])F)-sIniU eX\KjUӗr ݼBӚ[гz" JYuOBg}jwSɑ)]b-ׅ gL>R9ѳ0vZ; +]AzlPm2^TxY+xUk ^V̿cz.#ns|iG;՝(o]NlI/J@TxK Wwd=Ϸf=!堂k.!XXG2>V |*&)_(R@uf_Bd+bB/MDEx"!5Kgg&3l⛿X=¬lU]]]~~ QbU\UT\  0 Rᶂ &L2T3%"-f /ȗ-4eg}IVMfm o¢YW_I4j<.[=xy<*U8a:FֱO<3*b#:"X1 [&9/\ )=/+@ŐXVq)NrU*~T ,׹aq;o,itkVi,:ȌRE 70W0P`m<ESaS-t{jcX>%2Azbw]=WK?Zl)x91mJgHACq*! 
gࡆ,fYs1Cס2t XC;:3 + ÙCC!l#Q8AТF~>ߵLj= 3qWFyy /he(CsM^+} JWI .)pQjZigeD}6BաpʹjE8Q&LFw$&l)g$v$~ED?&н( ux mHNuB'!i 8KaB'Ρ*.E$!7y'/ӝd_W|:B]tF;@x qU!7TBJtb% øڈtZQ PK E,=u pegasus/DegDist$RedPass1.classW[WWL((jxCbX*ZqHa ̌ {on>Rsا>wj3p1hV<9}}n|87 UaeBC8^`%CL/ b:@#IIjS1B=Ht5&` ъ GEFjb0],'m=1*bzҲa;> Irm@+tRkj>JrfEaىau j>;A"٣g2hqNg̺.[L6;*RGJO9b6}B;.Yå.Q&3^1}Re]#]i_HwֵnN5{m9%zU-+dRc WFo\LѤʰ..-/]9yl.(/ T>/,&БӺA)"4:Wr]623s)H`CǦv ̨2^F,|=ٴ`£9XS;eǸ%ݲ٣^8gJFhX*=> YY'!iRCCflQyԛ$"6bW4LaZe 5kxo*X3>1ԑN-ȁHGL5"-;",މl.xOŲx_>>ҏk*>9Pr>YX]4|x5|o4|kv۾vGAcyiP2puaQSʓexWeS\fx>%Q\ۛ*dameIu'#MPW ~͂+&hؒwG]W*\" YufRAkYӂ 7}TcK)m.C^di+nPd.sTϜS:M3=a_BbFD`۲r$zw~7~q )Г%Q(H窂'ǓζlR:@}mT Vނ'TFoplm{Q~lV -*؎<+F[jPFh7Qu,ŊY,406φ${;-E}ӦA&V}P{ggP1m:]bݿZ[$:?Yϡ·8qtŕ@aEՆ@CC3:^%\IJ'a?A*Ia$a0%Wa{ϣ*lBaaOS#bhfV>7sU%Wp(~hr<><[\yGدEJ*:j'16ܒ4"pP9;H)iK{>O#<0Ny4FaXfTPK E,=[pegasus/DegDist$RedPass2.classVKOAّeV|"@E]^c奮``YF<M4>gg/51={/zv"cRWH@KEvDNӻZp?@;(qlZBpvXbvDNQ좳*vjSlՊ=+dt 5AE)Ey0L;IgɕJ2E\{uxb8.tGYĈ8#RM$AFIz6+PK E,=ENpegasus/DegDist.classW{tgMv  hGe7%"QBt!4 `D̮jK}[-UZ٤@kZ߭EJs9|߽w39 bȈ!+2ޭ C*玊x}nSp#C/> *fC >,|D Upb T)iQY|Ne|A5p|Qл-=B* + qT0TC :|U?,_SuA!㘠ߔq'mwQ'=% 1 #'cPƐӌut-bۦӒLsHy+U\~FV3.AH(۞͸%b>Xܛ22}%xH>3gĒTx_:z v2fqOz&vc(cƲQXN4`VI/| Hlls[t9Umɨi8{L"m)3n0}n7,[¼A#0x+Xv5d8qn.L3Md$ETJisDfGڈnjNiZt0L K 3I{,H#ޒm,6IXreY8> /jW2DMtp\C5cbíB'-c(2R%cXCO8kOJyzH6 J%FeIGOf3X6?\'^5y [i>.h`W‚)k\` $bk`NG̘i []Ok^C`'JƏ4OKhTW77wZ7fX27͎ۙ;wNg=vwO Y+GkmA%ٟf1M=ƀ:VoVdbޚgu 'Y lPGUNVG]sflQ6,,׭61GuS.ai/i~KJd Fsx+&V ߊ3=YKhE< &YQS#j:(҂&w"R#hc]ry"c;DV/ ϖ =AsoWbIdVg߰c P(jK*p e(aݫ&TʴOby-HL9ɂX2YJXk7"9FUAe9nfZk*PR+R%]m=Bu@n&, /mEB\T60vKĻ@଴CeSFPuS7nw q1qkUdh47i`?pvϦJQ6y^VN#5].ZHq;"nc/-.wTr8 %!'?ғ\,NPq1݅r܍ո #W ފr K膏_@0z!C!)AmX8gRx:|<ꝗߋci5=xAHN 40fyG}(byO۩]^ SB¬a Q%"/?+US3y9̿ȩ!jA1\~ Q{0$ ፌpMyq CW-aI-raW9\;v^/k9uUD4|n4vb.Ǔmy * Vq4a ,e1ч'qS{|WƳxO*9=K"ė!p}y\>23&~<̊az VV-p qUB&Ba? 5ϸ1?/M!~N)ՐCc4 Tp|xV_*nsf``]"WU$ _Ɣ/(H1 BXab DϤ@ܜ>`d D_z38,?BM_B^&@Ń6 Fa8 mZ_ӘSz5e1-sReu]H f=&5IWނPK E,= pegasus/EdgeType.class}RoP=JK ݜqSLd.,\"dӃ=kR.XFQ:>_{s߽s_?x *aUâLT`P&+IBKC>:vRo\ Zj3?-<^$Bەugp,rwQ+CFV+ ? N= \߱ ǽ\I1Q}27AꚮJsZU;~vIvL=o .O`[~ojۢfF{=JU۽N]L/Y4QG!/D &ael5C!;ݗ!>ߪT'4tODoH 6'xZ^v^0i% >,SF=!/15/ M9K3D"־!y USd͛2,a9sPRk/A\i<"{d&QASOmđ:B} v13]bBPK E,=t@ pegasus/FMBitmask.classuTKOWgcc&`gKë!M1c2$JU*@QEņ RMڪꢫ+uU>z؆$PKs;9w~yݏ s`܅zLȘtA_v`ʅ;.L 3H'QS]Oӣ2죚 B8 ^-dUYMWK+qSYɓ~QˊsE)90)R1:=7JgMUC1JYN$ueK}-dFucjt SZP3Ԗ}.j_RGce#rgL%s{Nٴ1/g c7=[Hl0H>J>RuWdTlIgp9C- ,ҫRuSD$$YZwN檦gyU)QH[Z1Ŋ{Ľ %#Nk#%rǫ-j}XqZ:S%-U 7HC2uܸd,>>qˍr#|d8{qu5c*'$榪1'|r<*gq jŒz>}fj jT-YʕVW* lVWs9=)fj/귰YOR<#D/z]`h$)J;ڞ`ߒ`CvK'iu G N,;+ p'4$bRW.v0oomv=[O!'˴ 1v>\1_Zo(` /Uy@ ̆%& $l1U NQIhKA;qK Y9l:Γ$P:IAJ1O04C:$񟌈C&qT/&Y;, cAg̉}fHKw|`#~䲩O-;9< O{Vy ֈ֌0"Y+Y;FXqv>]/3F]*yxY*kx1KsGe$OA.-6j! 
Bt ɟdOmHO0LҪH`$>"DGյ``c>lmlЪb=S-6t><=syB;vb|ϜC Y@ l6:6f6:XBƧ5x˒(6_G8$B2bݎPK E,= Apegasus/GIMV.class[ `Tչ;˝- #Y 0KXaL@2g&Rb"RmJ nu.j߳VK7[@wνsg&,^fΜ{OtGvTxA脘r#u@`; !P1qg*;N B1*F8JUт16.kƊq*wD.*OpR9O'ٸy<1Y<\i6*b&S^R&sEϔ67ok117Tk^ o}]0m7"m-V_8_ꯏK}L޼}f_:&%T4"IL[4\2-DA.HpUW8jj3N&.x[ t Ha&w/Y5 [.2+ yZœ`WJuWfU.XUeWh[[[{v/[ MrHr21f4[傩4iY*LI,@4"0U龤 W/9Z5+?68V C_kկ]hb4OcJG)vixcJT;߀)2W6ݬT-\XKr(:@t0#,;Sc1Mƞ 0=B΂P]jK 8]"jN:Y+~ j(U|A'`HukZ$:ˮ@d/珮 50BmQA>" =94kƏk?=h"vCm?d 35q|n4?Ò@485π B褤;UU٨CP0=8-T8٩2nm(/+/{uL^pPxvA3+ɑ+6}@(Q;"+j[M55؜ /C6  &wV!>Mn&L{1aoL v#O#>%,5- ٍɐL>hW'g|C%jg/ɯ?S}Y$LBVYkN%E_V{5yP˲-Y J|UUP$*Y_'c,OmQVX[:t\!UR = 9S3$}W4˙>hƉ* &R`zr|asj,C> [w7]-}T?\gmplNU{mտ[A4ŷ_;W{daFή0Zjo8MQv 4j䊂MǸ6=Bٽv"F/3̮*RQ Vi>Bo=ȗЫ¿$ S6PpDܨKTa\¾ ?mδk|V'&oE.(?|NpTd.(Ei̞'[=E\V_a#.rp.^ȁ{r3MXϗx_+x䫺|#oװZ`.\N~ E>I[|7x [^Ń'ê7u.}-.ozF{G{]nbh1>9v1Cv#6z[dmXiG7"xyγ/mQe=1VM_p `̼$ZK4^.UHT^ACe+D؅YO&F,&#3,K={|5*Px=SAoɓ4fQ#sHd)G çUNNti h-ķ.EX\u6m:H}dWa$|Sm.~zd-va$vIJL;ʬEɹmq[ktR*!cxY躣Q0a]J5 R D0=j V#YWQrvLB52!>Ĥ i&Z*+PPr bX~/vzK ӥ4afx*dy 28C\ Ԏ?fϏ?L.|pYeO%{%@IʹbtRCe$Ozd1zϼaT|hy/Ol0Ȃ~6\H+b`~%*cFc&d:F]6ӵ@҅9O:fSmznm,]l/๓#@Z=V]b{^PDn' \k;Ҽ*cQlȄ4H|?@M-*WE60Չrːؖr):=8qr QNJnׂ{vO) O^gL F)?%W,D-aE3CAXGh$YD?Fxi/O;P=O=(age@E)D?C92}A1z_& ͣw%Px9Q ^8-<^%px=<݉i }[x+/ \M2bt CЭІgI@OVGW+Fr>tBq2TwA)ߊo; 48>*QnҤy{R2DMIuȒ)ܴT2t wtSYjL3 xtoN˭N3,;H SAeX,X* (;ěAe1W4:^'EF'-Gb&Г#i >?R?$>F%ǔcr%?+25Gn +_Qa:s`_vdQ6fvb<\ʳ8/t^| 5܏G9/|{9[~D"{0G~D\j~&KE<+B̼E/0xvUFLzvcu*ѻQ3@2Fx ḀNmɈ[GuĈ[GaeC_S`Vd,[D~JF0Gds DvRޅl"]WAnmm76gmq6㬍{qr4U%{ jW,8xB=S-ݖt`]6򺠰 jEPtRګ Q=>UMN\#9p[lJQJN1kGX(Ez 7튉0a"qGZfF"CHz,FzՏaj)Uf6*!PE w!u<{',_4F|9D4V< / u[δL˻@BYcشxEH ̣w\Jy< y<2i!'r.ZBkx\I{x&=:Q/@dZH//0wq̾>(|NPan>'">я o.g3.JS)ҋUTdҋmj?n8qw|} 7N*c)8p4{L\g~}o#dUduߐOHANZK"`+SMe6 [ ʭL55t[+nrm@`6䷗::kM>kpigoe||>| _#qMMknzn6t%iGh Gi+6nx-ܳAL X> ؑT'⪺# Yi*SZ:AI~#=-M 2]:6鰲JXv^wy.8tX%H!g $ !SEhf{/fkۯl| oD*fZʷŘ(ߡ6tA;NzsFb4Zm"nlO,_C&T>*ebdرLvw7 2H2n)?(??LQEXHU/5_)7cM[tTPs!mvq5/4^iDvkh,( 5}~SM߇z-Uk*vR pOnɩl7% l C'`;OvOvO ~ngi~)VST*IU(4WI]JUPdΕ[Cɮ­g&ȧYdNjl?葨AvX˻}+m8YpWDo ~En qֵx3(B]sDUD&MtD5bD=dZ7i|>P.z(E@J Wri,'*\LP e85bܬI}6N!*YM5X|ki6(uGrmOPXd2W4 Se5*f rK sKnv=")HUJcRjp7h6}ˑƨEi/˕|NrdyK۲*\n@pg+=KJӽ)SnDO!Q)/־ _—X _Cb]dͷ$|G# 'j*ECӓ lJMF1u4 %srԌ[s{?2x=J8N+!=(iM|FLː~=aCx I_q%QiڡU2Lh2*fpFͦI!= ))%SC1eLSJcA=OP~=̰"}f#$~+ٚH{Ӛ!|گTBYC 5z{q%;F*kYs$4i H6=>B7 .^ V3&$a#%N8xPgQC0Y{6#Éьy^\̚RRƨS'Ľ5Ik6V`ҾnZ>W CS03Hs1G[wŸJ 5<L"bXh^"SO3c*t1uaM-Vbr"Zϸ!]4*OY*W06 Z6mȃ\Rb5(A\s:aW/q0`b%4VFcpNkrB3 D_MqPP1J!/?l8 i9xUyFt|&f'O?>iF]^U AmF'D%C PI?&glΝ]_u:}.[{ID0Cmsty_cuUAO~[]ޠ{/suNv_PF$l*!:+xUa8 o" 9w+w퇰)3XUAP: wwߦ{aL> ?w_8edڣ6 ܅49Xb9t]RVaE~S+LXabr 9qEE' iHY ?z~ 3hg?~$J!9V0 ݜSHVQPK E,=3L#pegasus/Hadi$EdgeType.class}R]oQ=]XX-R** m'Hih$!6Jt+n,ͲKXXeb)x73̙O(Gⱎ'jHj0P0Vt >0Җ42hoD߳`0-CFbpy.|Jx˫ae/o-Ի1=C!b>!b A<F-e 0TI1dep ˆ"#aXEl $L 0,ɩ|ci)ܩewiPt~(Kr눚NuH٣uq k6Cj;joaPzt[Yն9-Og2fp&/PGpR|&8]q^AV+h@GA%<` S8`{}lZ\Rchw5*xOጂg< $Ë$Pe84.%y^V ^ehZ0 6!.5j ޯúXа "sIag&f6Ի3ߺ?\Wϫ٣pG6Udz6D&;G`>jN<"Tޖ<+҉>Xv)[!>O訖cӜy@'ͬFa:|di۾Դ<ʼQWnd-~1FF?tilTg)VGUֈ=>}VXIdžS ,Z[7T iD")աY,*@)`%-X̾D ʵlZ[ђ6l1q; O6"Dg X TWy  /A{u JW^"׶]Dt.k'"j:q J%aE[Qv9*Gwj C[{*)odWaߢ] X~@'p3ΰ_p 7~ eUt3U~#-$I"3M/n& ٝv!rOI:ЉVM.*]"$ p Uv_CDžڦ?"^"\$($#.c˨mrAa&6-~nDp W@x+pܹ1Vc=BPK E,= hQ pegasus/Hadi$MapStage2.classVYSA&,+SՈA<,d*,.[ xTY>`ɲgaXXLt}L~ ,T4aDEn*b9q)-qwSq  0dːpkN0f |&zw Lo]BnLcAN[~=u'ZQ8a[G7lqFmwavq|B${#k[H\KV.Wo(U>ByeS[ I9eeRh0iTXz੆J.+PrYَI q6ԩKJZ˛Ȼj$m(-ᆖi8 .&BJ 30? 
3eJ>Y;fʐo}ߓ5v8l؀QI?682ތ5mo;Nˏ3b?+A<$#~n<{X{r^~#݉?c;t굱2DžO>YOOp"g8>Ok3 M+ۡ1fL3ߡ >d鶚idhvrT1,wv&VŰ|NOn ,Ҍђ)Q |;tCw1H ^zPӯڽőfz"fZT-]y,Zim.Pv^A^xEؤ`3(؊-_Re| s|Cbh_=E=}ct͞?1bP!6wTP8DŽgpW- k g|xN ~/c/E?8~.tBx~EBUKaMs}>/ Y Z և !͹ŭ_ΜʟWs &'1S[!Ŧs)#f>4rվ$*0W狼TֹD?/r.RegLӘ/Pa۷wDhZrJi8sg (B1Uus4NAGCC9M\)w5mv5ot1-".=SR뙺 =ɜs!v. ^aZhF(AAJnpճwWTL)oZ@,:whm.׼x/8D]"",Zn~[K|Գi}5=y^42T7Fk+!y QMJYc wΊ3[-KO ĶES 8P 5OpL.!RB|3_Xy=Injڍ2^JAlPB}$ZB9%4uW5Wy;ɀ._h_꒥jqKdT>P0{͂ţ\<]DS9sm]%ljFn͡kOk im/@ dtWWUM5XkN`=OxTE4qvvg"aAy$AJ>4II2R5ڤ z0jqPCJqOHQpFjy O~IZ1i%&[pUYiPc!i .Az Azn$6V8 ڇ{)ł}K^"p'P c><+8#B?{"x=Qb@܏ LF:9p q#ʑH3&182$K.|{qIi!)VPvB&c%.EnWnm ko6SC> 2t݄q 9!PF9DNg,*6q-tZ+=PK E,=S  pegasus/Hadi$MapStage4.classV]LW  J)VVRU] ,3ۙY_mL4ij>I&}O=wXDqPs=s;.V⨄ 1 aCO&*a) #|]1ǘbEK@aH(Q?\:7I!"NHX OxFij|[5] `>V# (kڙR^e(M`H(>8#%p鱕 @ulM+T8QSb{a m8dLF㆙)%1Fadbzꀩ|w^uv"ueLn5i5a],ՌaڪYs`ĠЄѿFqeQ}r&ȤeM6*ǕXZS 9, 23cS65+vWlǘzR@"=wAb5,%~3{01# 6Tr.qf:<|s̲MN-*n,Ee;8bm 1"S5'z{N) "H[3tK D-+v֤`_߳.8-[\4uMLl&&fovHYVلS\˨ yyw̓zP5NB~wXetd3$2^qҨYk2^"ޔ&,]򭪫^qsw[0ٝnY- ex!s8/`=Q">"`/`=/ X>QvݼBYh2i^sShePի/)1}̙P??JCeێfd Y5;r]ض?H(\|<+zR@dxdykQM+IQA*߆ ,O7\F_$E|ޗY9sHeg87Y% Őxh<\#5IEcMR`(x#šKAǺ<\P 7Ip \ K&E ^x4%S(m.F?eL#?)C9T+"%a$²q2B/"օJ+Y7Y6^4 GB;(&αA\` >e |LU6&xٸOPKIDQG-l$]^Gt%Hc=clowLq0ir3{ȺZϥGiE ɋ#ɇ~WD%~NO $IGa}]wi{B/!qO+~c"H`:s2 It;L ĩ>D1zԬ Wxrx 3h}N%pG:8Is| pPV@Ob PK E,=([ pegasus/Hadi$MapStage5.classVmSU~.db,Hl6Ph mĶڴ(j7%,,;Ƿ Ug~vo.եd s<{6X:8j4WQ8::X :`2>2 C b^st{f8YN1.Bm5\Lgw+u/68j>q1 jl)/ymrׄ)~NAʴ]Olzn@ X Rw ~/\{E~=($@iY$#z3)]xƄ\To $rBIZMp ]Yx<q&r h!1ԟ0SI%cŧH]Ka)?<.'1&9$DZ}WPF^cʷWÂ}t~NaN?DF/dB-'R;%q;|wᤌ|}T>&||T=\>W{R|pJ}2/8- b43b$5u!SKj \34!:i epw,Â.v9}qw%j|j꼟tۃ:ݭjL[OufvUu%jj*B c@L2:ܕ0cjRjj"lRx}4[8,\3T _KF<3px}iWfsvs>7x vJ[j#1WzЕ{)3myy%{- pͼقkDU0×D^蚎k/kjZZ',D{Բ!$/@@6-!~ɤfպv^(9I ;皖DpQoDσ$ōY\*.')Kt w[ٲUӶ,&DE.XlϽN|1!5 ],48n:ҪBZr6Ug Ȃ:=O&'BIGuUνO=ڛy - wu$|rs$a -p?{g<_MT\Qm~8_7@m+@PY7Vzo\qqmieG9ҸYwY@k%ixBR㢎7q?JCQ^ 4}劌&9  UeXg`q0Vepd1rZl'WMBIh]72pj/5Lѿ( `_һ 撱ɧi`3PsY\g Jh7 x Ocy; I<OѼo֔+|(dfym )WSÍ z&N]Y_LM&%hY%:`' ,'Sͥkϡa@ q@ chέ\994ӓ{r8N~ybu;!pJ!7 JO,cQϞhe=^6 Fn׷|E Ӊ:(&..|8P@G ntV\7M{]w8pwM-{sa<~< ]x8 7ra<"!V(:q\:LGo`ZI=ݒ2-̈́W٣5l 鞰ҢzWF*ܧz,fDZYgjD931_(,%;n[tܔ7inߪkO[M=Fyb%f*k==Z=eYe|y&Օ2)e=̿C$䈉6cLB,U9Z'aG2F2BSk,*/9OH546}Gi1%yj'I%w2i];…:&IW0^|}n],;>-Q,K)2.KM8v R_5J;w>`6/ ' $%uyۮRVyBP!< X׊m%YNxOR3}ӕ=aW]#[Y7h+'ri˅M,C˴LIw*Dѥ9-]F6/rɱ@^ŅHEK.P1ఊ#xAQp\`ɽpY62}t 5Ռ5֨^ˏYN>VQW36?-WTr "5xMsS 7~-܇X;ߩ=xWTӨo*w^U/Ps K|8orTRYqBqZ*֊3s-v:{=:ϮBԹl}Hs)-y+(%Scņkd]SUK$,8#zci\6%TU% ⑚Dr+rRz*Nu/{kmi&Y!g&)@;HD{R3Ã:Zpj!ӗ͎lwwB^7/u.%:W22Xn\V1k7MVjꒊxj+Ǘz.$62i@vOq.~Ն:JH,oqVm+`R=?Es/%V}f6ǑΓ{n^0uE&'a,gbk`%Ur\RNuUNƛ0,y=5 E^ż`α?+&o>sa8`EkyUrWMa>1kb[aUSGm xcxbi,>-*Rt$)˙}ޘq-; ޙ?m :#pȪmr\SԎ ] 5jKh=Zhi#6&Dh3R'h kq ]A`/Eq78Hxz6B AIK>7% S)+I v1HwN2%>Ňt9i^JQOt%:@0Gq>E=&; ˸ǝ=\+WEW[ Y81U=;KD9D;hXt9yx%C 2n5GMЕZe7eh-:dbhNl:Y| Ilf )J=7799C-pqZtYC:?:?u0B*VУWA\V?.$E_;g9Z '0s"1ÔO!kP8QP?}?$v'1I4pRS(kg}U0s!֨M0$G|cy/n~gܼ\neBk_#ZoW ]vlSo_7.>S"PK E,=lg pegasus/Hadi$RedStage3.classVSU]6&M X-m%- QRiUld!@Wg:~/3 8|3s7!Ќ{;ܳw:0M|ė["B #a>"$HyN-pgUch8>ǒ{RW"$s-]P* g ]*lY)삶" B*4nh3f/(+LU,*5f-ClN+λJIdٓq4,XZIq<'{K)ܳ=JZ?# Iv4xncGs 0k>\kA%o\iOdwq7In .PU$1H$kH#y^0IZuPK E,=0A_pegasus/Hadi$RedStage5.classVnA}=IX’@gc $aqKXbp)A @gr,Q73 !Μ9r TaEU]]^Uu}y3`31Q 2H4(1a\q 'Rcm <{!1$CKѲt8/bB3닎)*gd"X|,,GmKo"|_Ҧ+}cRa"{NxeC\Ƃ(9kXQ˞P $0*F!/`עpɫ1nL84n9+]#yE>m5 퀁(%7-+#2K*mҺAb苏aWǰ+B쇋 ^vc0^,ysJ53˦t˱} '\sVAڧX7ݺl 4-Wo5 ?;FfXNfW/3sN򬥚2}*:td51:,CJuäYp^uCW'Þxim׼2lʨ@ԏU>b?-譸J}z/˪%]\A5N.n'R,3cx&'$3E9d{h׏Ҟ|Kh*X" ]9w=>a?@Lm&G!kpVXub+P6#m(?ݴ74QĮ}&]CgbЅ=@J:rQ^z[O>k*I2O\758\5#΢,kWC(?8!l 1GPK E,=IV+pegasus/Hadi.classY 
xT?gyo&/!'($$0JX(D$Ԇ%%ĶnֺTK}ZeRj.ub[mmV{o&/{rιzס?zqDžE>\rQE1%\E)q1^ >(FU2U}X^*>@&3A  éX4Ta NWV: U,A5*60=UbsT<s*ɛ-P(X"U\mh?ƇKqZ8><W ۹RULWx [UoکZpѥbٻ0T%{UcR$OV1bDŨ1U<׎`T1Sq+.?gnPq@S}) ~1gzxgx?ྍ*Pųy#X)x?V!窰STإyq02ȇO}x ^&WJRjkUNAI \ܢ- ފ.5 %5M@"a$+d8<3T/hEBQ`w0:g8^l-V,ho9 [׊ZLF3KEDt$&.{`Qzuip/wElּjE ɡV,O%3x;J-J"@|-ǂpy%KO ԅ޺d<%h*EGc P0 ŢdjvCj%Mی_oDh#:BH qc%"BQ"';{HgX!InX}Hd31$g37 %#idPj "*qɾ/:LhUF!q51w {i?0Ir AJ(x#2AcK"1aWr`nOi޶+B0L@-ўI#H`C'mR; `M{{O&7qK ޮ މ0׈ftw]e^@A[{-X,J;a-ׅLgbO d٠/<}kvFI=2%8 Pru  F03xT g(Q8HJcўPo&̩>5뎎u5"lB#$K4 ܧ͜foޤ@ CkAZ2L<Okp/ܧ6.\lbx. vr0q8Op$.c,T])Y*㴚pikxhSfc|}Pt#R]划ԚzuF< L,X\ƨbN;Hdk+pé;ILg՛ Dj7T؂S.2:Cd$s@|fzjTf '#LFɌ0DtA9%$٧%cV}۵r`bngk07䉦hwVB=3i!e1β96fH53Eas(↞#CQ% ۈOGr_%rM"/'Gve$KFgOF|mzEFuٷ!Xka58Ѹʜ;M(1؅C3yJM\3a{!mO$k$7NeJ$hGhGWε6=sa1Jr}9=px`ċpD?%]ƀtAa0?˪0{ )M!"_30Z(c7cWBMN˃+JGكcS~ڞnړ|=t63HQe?кk[.%٫c? {=D8 Zk{|'kX?d{ vΩ9;εp5qh>rʹI!^6c嘱i5|lv4u>Ħ5{l\޹ճv>쿷GK"$tMX}66Jlߪ@8eؽE ֝z^T@DU~Ƥlv~>`Sz!{6}ئ5cvq>a'mˤ@떤iuMowiׁ2wT퀊jQ0[j Zjyj)VSUl /njh6Ԫ"䣪bUIi|%q5"o}AfGwlfi [% SE|%CsXRT]W_26Ny|ʂoTjJ>OC]!T'<,auk]o>Ӫq}+r(U@ W:ƷS #v06fS]Oa.K>6Ssn}DVǼlj +ncyi8jV mȦ̓F\a7Fh*iKڭSnO,д4S IQ^ˎ rX{TaNki<ڇVXgj&5j<͒VR6-$׼r:FȷTU㼖.5ZÎ%8 q mT}bc'BbSP-X,)x Q*2\/^;īpx Ó xEo_Ż=|.(CvY}?@|qb\|!q5Jk>)]1v HLyzQސ?B׿dsY,P)BE &2q_ʉb3T8y萓D@N!9U"e8ONexB"3ī^|%gH!gRb?ނA!f;ܜA<5\];]u[wT=u>"~ OL{ טT G} UtdDv}< y&եTX ^& JR"Tmpo{2nS(\B6"A\ -0Y w\jµ6IQBε& \K IA fÇT |ŗ0vEiZFѠU4x3'Lfb&RdBoD:|!ژbeyۡQ+JNICLLNd&'3A9{3ʌJl RL9]̪kmTT(7C"uiыފ=QV05P~YHTN>kq2)K7dUPJlJ'WBy<$;'r L@Zvcr-<+ od7AxO$Det+Ƞ\:MwA>+&T݄jXnB5v'avn5v7\ suy,T 7ռ01\ԣ O W)n"V Z8 y*E}_r< %BCr~[pڲ3Ԙ?c>Y#4F965~8; ?"rN~\~/?A8/.rΫqi pQ٫z#/y 8T,I.8_%p#O<?0 [d3dG BzV} .CLlHK Rwti`l%w`%K 6"jAO2l|"تmت-ت-تGc\<| _^p.er:SWf nW9^Cwڰµ&X8 ,W,f  ,@6 ĝij/47 MfX&ov@ NÀΔw9nP:g8+" 6KFqUTV QCa5ĞtP*rtyfG)>G'Yrȼ$+2ˬl;F|%5! q0C( Ԝ`A>OJ@JxP%_UX _,ojC2GRFEw[L3X{a"#K/z3yća `>*d|\\w)[ȸ7Hi>)>q5m陔lLK*t {DK&rV 3A Fڰ$"^ DPe]~ګSe&Tfbnڃ)Vv#Dzl-7SxΔ<+'l.N:5( 3}H,ҽ:eZɘzlP0LlDX@lɛBZߐ/_22 ;hf,o [(5jmbq[4۴Zgҝ39L7"AGxiR)}Dz"[KeḬm ;2t2c[F:uAP3KvƻK } FRZ+P fNB->$/P˵Ջ0fȗ\}F/a^[Pxx\ ^ܼX|A&OثcцRQ,̫iީe:^qD\2>˘z8gDY3H5 _.l,vSZJo,.sq$@Hx|~tdvhYOsK/Ф/8 =fJ7"+ aZSQ&d|^Q|AU| _V p+*IXqi^vԀ(S+4|}X IX6T²pV5g^hʃ*N`y$N8$53*UIOA>EAgP$*&8I,K|Y~y)瓨Du?XPjB Yn T5BaST&qkr"!yZa- @1'Z!D Xgq%#_1tha~} Hwq 'Lϸ=+#o">ψ.㒻P0V'jtb #m؊I#H3ɡŸ@)_ӀpYхN^.'Ŭd1{ PԱTH9%OG/ěm^K/Hr3%߅^u?Yd^F7d] J8E"sΤي)6*#!c`UO sn BwK!(ipI;sLwq8V7Eq}W?v~^XD0W z#'*e>Wg|#Z8yրOPK E,=l25~> !pegasus/HadiBlock$MapStage1.classVsUnceKh6`Ѥ  DZ@mrM]vwv7L|7yI0<x6Mݶqp|9s{fbs*TcWnpK+OT(TAQ:9()LE<֋Wbe_(XRMݛeK y&)6 g(ZUn,qGmek.CI`™7 2.ڢݦkzް;9 ߧSpW"5˲3)Zf}=-(6 Z7ozs炰-NN3ʺx0`z: cn4SX~ ݃6dBTG1ec1Yϔ=G7 '^m'Nc]'=s44KIR3Uש T0ܨ -UB@z^ӡuu٭^)Q\~$=_mv i˔\0~TQkVEd >_nԲt.{xgMH 4I pPjxHkx ٣+ZǪʐ<0PS 4|:ɽ/3ԍp443;3 gpVͤC5Åp:2$L#WIJJKͳvR!wTy*ȷh&}GȠT=IQIYcOnv_q@P$D;7oWvF{dt =HxVmL6]BCOX =H22j!HF瑟ނ~Al2T*7O..sR#4F)(%:{c42O@%LDBXa+s-1GO,*F/@ p U>A=A|dig ~d9crӴd"FoPK E,=hX !pegasus/HadiBlock$MapStage2.classV[SPmI  ུ@-PQ*JО)dԁO8>qOZB:8ݳg;{6g̫h9qりಊ+ȪaTUk5\W0b&P0&CaC$90%7oX^قpI]XvN'ʈRk`i>Z tҪU ЅDlr"%fU4uDVX$kP>!Nhf -F.@ C^› X]-zq(a!pCp6M:(yq 'p):M|$I K%iLh"/DW'\ն.A k 8.:H ,UI9t'PK E,=t->!pegasus/HadiBlock$MapStage3.classXktTWNqg&w&I&0y-I@BCC "&{wG}૥@Ѫ`VE]˵].q;7!dy} w}66чr>`‹f)qރ4>›g| )| | >~3K룏 y ܠ/KxA>\pY|A8zBM2lӍT\HzW35TSJ+|zĄfqL4IT %VeTu2M֓JbH5ǥ{^GqM_TMy87> >]҃yqiYa)3`J)e;٫iѝY>eeڕyi2+ڰms6RPCP7I6:F!{(0¼JN7(D`WsFBI+i"]I>c)MEˑ24(`R<&҉>hd;Oh#Ö́bdBsG |=㬺jw;W ^x.߭ \pRV2߀3dYO!z37D".R.b *E|]pziʩ$]{"Wxs%@ǔi&e/Ō[6-bFďc?~*u?Yq+o ea~oo؆(REgpOgB?mWy {!(I0JI?(/zyLgqR(7\jUPIq<*<.6TΡ E7P'UYV1m\K7_琫ް >Kk\ve)fF[}o67-3@Ug᮹L0H+RLX/`QX$(l9haɤg#?Azz{GYpA.D-WCozl~ ep"ij(L(y@ wX()SfXsi J;g5.޸YFm \37:h],@' 
EOA{搐g^9䛛Sy5WPyaq<*ᅟQ:d~ˌQE3A#COdF펷Q Q)Dg3P z^G{]t®e]ΓmߑG$(_\)+T>Y#-v^?x65?t$r~iv.& ݭ秱8fWfr"P'‘ daw[08uw캠wjX_ƆJl9WɕNrm"5il&x¥mk$B lJ o˫9p"4|(ea uQVMl ZY-v:gpۀAMHzl3ΰ-xpX#^bMʚ0P1mǟ6tmgQv?ke햓g})re9hP3oA"Ǔ4ꥯn\ćux0xhQ#:̑F8LG(JQ AH4ipǣt@hNy^B "RG x\XzCz;΢$CdJH_ :.]Tˑ7nX^-fIJu'l&fFZA;GXш!BE}y-3kZp,(ui[&F%H/F? 86$0$`QQ=*)1LKxOJXi )q(O_p_C>iHX3_ku:u%Nj THO28fntudp1BJ5+bH5M >5L(C]&n( J~avJju7Mlv~OiVոd5N3td%_þeɲaнr)j3O7O85˘Tu iZ"ZuaO^[W۝֭$j%=I5c۶ky*w8^6۽Sihi^xhDeՇxu975A8뚭}a b YZ!>5SN#Rx~s2O1fZݭo[Atc#ǫA9x38YжU*k~ݍ~w{b~? 8Qi)у1~O3b " /;ttI_rKbD}/Uq* fYw.s52t,$b+GChҝd{;# oh0.J w|E,4e;^tR dK89C$lU)G yE6 /쳌9Zb6;8DpQ!R{y"Ӻ_*J5gNn z.q(WaFnlܤfk 9j*.c./6PFXONhJ4E&+<ݳ`牨fa ЗlmE@wF{p_x/'+߅@ Yh`=.6f[jjYԝVAdU^`*h+yw&YSo!Wz^QXWjHQo}Qm[P4Âl6]vQAx2B),Jl[pZrDNGM&Z3}E>{SȢoEZ^~ VRiKϠHK-H'3]zQj/!:*-}͛/a5ˇg֎!KB6GvY[]8f $a"X&Mb+c+.f`d3qc,V4;(ΰ)&{ؓ¯1o#{S KЋ~'{>?ٟJPMCцxۉ>j+؃$;1&|$vѪ3!4S$>K<`7i PTuz'0EVQ{ H"3z{C䏛0ȮQ^tUO8!|qt|8gR}JxG9?.„L?c:uWFNh)o,gCV;]=TDHcX9Jus#V/Tj!̦rDxb4kj>L`汕mPK E,=[ʼ!pegasus/HadiBlock$RedStage1.classWitƒ=#yd @oD`me eGT`bBZ1{QG# I tK-]Rڤiڦ0tKto.k{iF ^'y{{߻zgAq,qhxg.+ >/A☂{E>W>A ja|D4e|4k񠂏q|"O!O>a1Z#bQE1%'2_ 2&2fl#2cZ RTBIXI#>`:i=7,v e%#fP̘: ֶ~ +>ffaRiY =կۦ&ΐ5}>h h&cSz.gpi~,k |.MO*y4aey,{0gĐғ m$ۭ-0Z^y30dV8qئ#cQǰuDzVғwygJwY˦@|Ⱦm"q8~])=3sl33H~Aœ}^uYvL=\$6q7L\BS۶~$fĞPeO`)'n9~qě dCz*/bыXqϜd ]bS¢PNGi&bZ#QHE]=jyas0;福, fxjCbA~m.%behDR³S {Gi󤣢XlJJ:A}VN[LqU*nj+ХbhVK* 8bgd<,%ᚉӆJtn-^Zֶ6Ή;fwcNg˘Rf>~G#G*~gU<IЦS x^OT?#I_W*~x,7x٪TQ4ŸU/qxQB˕|$x (|槩$,YyyfMrIhiIXpD+{NpS[*X ܒɯaW$יx#M%(JAfG =-=52, E) xp^kteű.hlf rVR)L&鹝n93`=Wcry42U-wI߶~SmS35@uOA߯$D<ΎqL)̧'x))xa<5WwNecDAS*"7M miv9QL {A'NJYIV"uvT8N:f`IJ̖S`^h4:y'7ɚLImǟ|g ,}W_? }=GQ~RiT')W lx 6x#gYD_ psI "jF!  Jy PD,j*)TP0b~ ̹lw".,E/\Q(zoz@D[OŵR, )RSȝg,!sG) s .'ҔQ̋TRF `(/u{fjrSp-bǵHrz #ʲM9E>h5@; XV @&z.ƈK,YTZ0$biN+aohF_v{BѩջN^zJ!1ѽ{D"[щ8F ` 8!F qGq^]|ſp{]7ʫ&ia3(yof30cw!]14Ɗ/2 vAz0(^v r?1jhZLGq;*)M6z8- ۏA-9(SV[^YuhMT&˨:@]< d e2^ĢKX Uư UF֝#b_V lJd3k=s"Ϟ8 wGG2rոnu}zqC8\b=DYN}%wk=(x}PK E,=\^!\!pegasus/HadiBlock$RedStage2.classWyxTWݙɼY^ K KaQ'6ZC%e%yd23}&"[]E@mֶT;V]??4% hߗ{}{\a%A xև1/(ح~| %|'?~G|xO( O}g$5rij |넂 S7d̈h @$zgak>_$mvZH]Dsz델ampw yQň.ܩuxRޒh]i}eݡG-Go:x\7cZ*ӂޣҩ-QJ^nH"mMr5ؒ0{굤{h"גߚj&eѹ7%Ĵt)HԇnӰ9-{>m0'Җi9鑩)wɄIb/xT$2Щ~Yy7m)WdR)o+=i{a/TQ4-F"?CiP)tiKoYeh 5wzC`4\FCJuwvQΎdBѝCjAntt43Rڀgr(趸/y?`n^{_rP , N}вӎ$ԲYmZݾ|[ ^TGl ^bgd6Flh)%ڍf]o_%Ow1nKL{r l4HF vݨ #/XM'fDӐE_VJx֩X*g5XeigT 㬊 0ݘ6bQw/T5 JE\ڪ:Vڪgr(U|WFEXn*^*:ՙ- Q!bb'R#H ~g_*~_ ~- <K]'LF4+)eK MX]|[^nZ_cg"w*=Жo{Sn輾,8M.=')[%gB>فo'+&L@ݴ4/#N1,ⓘȦjV&=X7ɻxvᖦM]T ә'[ezLcC3ɨf"7V's[' jfmrͲsJ0=ug0+tW*{2.8qZߍB9\`8 9MTew׬Ԗ?7P )^9kgY`83&',ϳ?4r̺&Wpf՜qCSfO 1ɥV t{?v0$N{\X%n-Bh.솂b >@̃b^ԉQR|!@=;gFi.#+/?g}%7a+nz_mU|O3{ܜrLHALA??q/8 w$ .ɰ`5SdS꤂E\kV\Ŋں[Fhp찝k3P̖3_ۗ(A;Yf:fw#PK E,=V !pegasus/HadiBlock$RedStage3.classVSU]d@XmiCЊR-ϒ6RiUlda]ԯt~/3 8|3s7!Ќ{;ܳ÷"Z1eZ@NDwc'"1>0/B$,pj?]G '<P kyʦ3M-Y)ْ\0M+Eudi0ҝK q2k:m)o*$?Wa9 yE:3 eQ\R9i馲~*jSԓtp9ýok.7_7jz6窶`ֆl윟qSUty2m28g@' gQ.f)к«C%d|9(l/׈!S;]b ȍQZq 6TLcSj1$A.ckԽ(NՌK$>#[x-E\4G!Mw^;Zco4~3nǜ^Fş':aLvQu̅^3{ي:ƫ<FWH(DR'.J4 XcC!ѱ4izAl8r_OBR6%|-*". ~f\[= Zcdw*PYʒz@Πnf9AG%Ӽ(!XF6+t~0=$,vSK)ƍ/Tȓ5xZ'DeRCO1Vk\}kT0Lit7l~CcOWW4>CSF؇GݢPn#Fghb`Sew0̦c9=|?G"𜺂Dũ.tSJ8Ճ;((iQ|^C ;OpndkETmއD'Pc` `pӯ`(U`(^r=97ʂU$I cGĭ $nW$1#y>T ` >A*?;<PK E,=-W1څ#pegasus/HadiBlock.classX |Օ?$ߗɗ7!  
@B$/3_8 -ZPD*-k]AQԭ-Xnmn_>Zs7$ms={s=߼ً@9rp5 M&c3yɿ27&0w&?`C&GL~'9pU9?tiD~!/_f_K8;>C~?G2EPWe'L>2Md\FIBFv&) :PB[iLL`31[20u`I|Ӈ8\ 2dyBGL2%888AƉ28IbKd,'8ySu&c23e8*Io, g3px3.n WKbX#aK$cRopep+%lI5EH[#j6APO 0 5h}hEڙՁV/irbE*fS;syխ UMu+DK Yh!;]sHeP{TZY emᲕ48ᎆZ;M-^  F#^HxagN5eJs|s%]AݤT{Yc$ ?JP񇴠V#ހ"nqz#jxa!;QCW[(k[;iF6i,JmtmzآԹ^72V4i 6@ZFgUդ4Y7HV[p4\X˓[NK-j﴾jI,!oqkA",FduB 6et(Ʈ0FZly'ՠTo *n?RT8ʠpuo& %M:ZwmIITteKդ@,1ףkݡ<;=;#[Au`p4!POu )xJ={c&3y!إa&Gerq&']Cزjݼh ܜ~Q& <$CAWPVghιL09SrשgԳYuڜu,W_RV[ h5.|דrKVЃmخN,C1`ڢ@M"05oK_&Œ]2R<;E7OARQ8#sLkYqucʹR)hڐp.X;볎Ya}¨(:'1/@VzyN.3fqz$x88;J~ 0 }+uIqK}=Gr'96~.,*5աgIN3  މdcķQ'_ |.KV0Q^ N7f n. Rp+ޭ=x/-W}M|@y@r;NLc,V-DE ~bf*МuhUPF0Љ;H:ew# ~ w"V}OZ[x7iN ;π+p2e(޻Xو ^SGHW|6hru.C`nS XdǨNAȯ׼PG qtURKQ =(!? zisog3'h"l(Y܅_lګ3{x~n֜Ϫ s_sɢF~fr_m9$J| ^NU6GD}H)zd8.^;)$UZ >$|B'񠄇G-mb3]lF_ OҥΈ+WXj݊J1*,1G_* ^i>znN} z/@zn$xqf`n5K(hˢba^#&?ӄ将#f͊Q w)S=II_~ h"٪Qb[RS7|ɍWoQz}ͦpQ(yAt3ssgd^QE^b:?}^d]XԪH-׈InoĞuCR`uЧ%룝$Mjxcؐܢu(ekT_T3alFc녀ѐ8}+?uLLɟ09}a1Q3q0I;ȾSѯP"j*X@013IaDb".@GsKV2dgg_쳴]DǀBLyxPO|&!(0 L< s,OR< i0a KSޡ%Zv99lH=Pl%(l._Q|znlF/ٍ^K1zK-.{F>!:udtF$;PrPS2%ryn{48q aq<"ux}bu%hʓ^:K3H .Aʤn()>P;5)XaqNTi.p,a`( cT6>WPx g7gsz`.EW3+FWh8chfg[4ʹ N4˄դoogx7ym֚ψ[ hgѹ-Q%.tIY} a ư%mClC!zrː@JP߄xj[ .| |:t؎onq|y>o?}|?%ScŻ;=܇§xWwqY܌XDxS&b@CW "wY/tWDxKoqbTUl눻Em(lgčm"9AϮ&O!;)>\xRme0^9bXE .$ EpX Z)^Q-XgE= V*xM4Z$`df]([ P&%#@@i!Pnh.hU ]BE;<,:``cOq%K&~` w]_] _1U ߝ_ͰBt[ϻ{->y>Nq.>5^fԶb^I*HvrACc#P,vl n{`x ֊}g, WTwT'hRlbI S{mv1p=T> ]"o"0(7x/Wa#mWCPK E,= V !pegasus/HadiIVGen$MapStage1.classVSW|e6Z T%m%"mMrVݝݍDЖǧ>tK Hǎ/=9q@|Af5a>$⦆[XРkh(|v7`Q] bY}eڴ`!^aH;5pdNiEx˼bT=S>`nsdЋ-y ْ+oŕ[¾g]OSxu! ^s0U dBɲ T5c`ޱ,Q %:^ ,Cx0*3n5gHuvB$ #eHy-T`BVXԸGŔ-{̟qv(i ! ף'lswfǠPN9C zh8OhH*VT2h76 LURenmrN_Ws+)v!B}ls͡iIyqmK֨9Gc}oW7M9{j\@Ik lPCߥ[|p*?HO! éqaZ5騠*߯:~Daؚ8*6h'}.vFs!}0Z1HdqF ]p{j{&ieR.E#w[f_F=u:r&Pe("k cQvD%No/Zdv1HC }@"Nƛ ʈ&G^CN%pN}g!Q&,N*GM%ɾxxAl eJ*/_H0 u-"mhk^ۂU&z}OVIJ- 8$D#%hcHcLI-OtCpv Jn,pǸHIH 1J4fqg7yi?N񽈽FFkU2\! |ҳTO$WȌ6M4];{4>'.rS-iB- "38KT#]?PK E,=9X?\J!pegasus/HadiIVGen$RedStage1.classXwTWNqg&7! I萠$CҊuBPiHQ䆙{;wBhU})'߶MBZZ˿Bs&!ˮUX}9{o}:^# A#f ,fC8 #OPBcrt R|<\=rxJ! B/(B(WT/+!|_G韜}=o,s÷B6#տ .)js=nZi FJs h' 'Dwx*n0SDoVJˎh!^g9!==h%MSZ !lP,ďji#9rnUGՔeNMkۢky-5ǧe9-o1k;$Fϴ5!m?Q˶aœm8ҝimF#O:9ݷ!E'_tlVOsH[6 Mbå3;afZTwYG6S\fo0VRhҁ&u3]=gs#xSH-[l[nbkܹ .߹|=5`]%bhX3,-cg53'׭Ջ WpYfϦЂ*9nUc1xƪhĽ1FɝHҋ[뗴=qv[JU L5H+D481MߝnP7{y(.Wz+Z 5vZf6M0BVNG YK[@nDn9ѥ ~_~-fn,ٴl)m#ѱJ츯C xQ/ Xz;rc/+xEY*pUū&Y]21Hx U1 t.z2ˁ\V;\WMd;;ѣ*Ÿxȣ*(x[_7a3 ܼƧTk--=CFk,7njx˟2:=zkwp/ckvATrl )[*<Ut-'R--е&deS=ZS..hM7D,k"ۗ2NT1bT[RZj~x<&UM7Rq ͙"¬@UČ;Q}˖ZQ'49?ET&N%~tjaQ7uᦒ<6,kWр؄؁~ᐋQJ}DϮ( QQkΫhxZ5D9l 1H" xz x/𺂿ŷ'l_F<&QnJm%8kh*rWn<+{] :p$ rol4qS8o`z?t_$Cph1)Րp{+:W F9$aQA!V0!B?Po\0uS1ʼJIBۓ xo3:8Z zq7|gZ(ڰ1I~7AQBzJ!Q5 }iOV85e4v]G[gW],! 
^=U 8C Wʕ̏%UO9y|} # GPK E,= x pegasus/HadiIVGen.classX xTof2 !@cH+a),a 12̌ ԶjMںւZj$uuiںUZjk7m?o& _s=sνć0CM4 0 i)ͷ\+RUtlp6Zz?th&,[Dn5Phnt6i+Ý;u|_8wإn?{t֑ѣWT+}pP؃s?AxX#xLx\:OuLNψg5<< :6^/ixY_x5^~#~+;J'Ҟ1 f(p v&d8NOD*H.ƶd:bpeGTKge'g) If"׽Iw$ѩB ÊɅ45N"l$N* X2PI.ŝM۾ٱD,;WZZi-pFWԶ%v|ɸfbSזrv&iYbGbՋ5{=XvnۉhK{6KDgtTW[i'gƅ[NJ66)JdO0Ȯό';  KC \xtiIt#1@(:)ID\i&kŁ#*Ab Ek# Nƒ.^kұ˜ NJ^befY?, p2' SrRrJ.Y0pdd.vהY5FZnb4g`3fӧF|\#嚆?Ol/xxWaeI]V`sٓd?G:uf%Ӕ&1W_M ;alFl'båcL1Ŀt&>$o#!ߜ9s֭Xx~|kFkqNuYKY؉UvW GK99bv˸^Zp6n$v9*G(MU>Tke%n*W/+RRSyU|ҔnFS72IV'K:ZrvZ343B) +[\iW Q8jNv '-j3Jhʔ\?yPMK?5v=Cg1[M5T*jQЇf-䶘j`pe#`ZUGJV2cL5R4)TjTE=ijƘATƩ񦚀w*zXY Ħ:BW(3ʭrdMeC8X&[V%qf@.oL%+sNPXp>fU*u{Qy~;[b-MKcܡ\V܋6΍*_c\ $Dx1 T%|v*KRaz%XRq'?ۘ)BQϭUl#x6'*);q8<evV%U2>_</**~A.m0Z$-kQ'/ p!VXgĉ;Y&̄JJKәU|H2U#4{7ه #Ek8X*5S̐|hdVnܢ`" ɫxQ2mӷ\2d\:0q!B6 r ha?T/BHV}) fNvTW⭶9b"3ZThʻ^y=_nf(Baz>{:u0IF4&nxQ-i.Uq0Aoj` `<JQΚBgaWUA_ vCzan;5Σ/`4w*EPt.hW~7jjݾ#vl\  wzP;gU GZ/jw6nm3#>QC&hFKFK]ipUS;$U(-ͨDŽk ޷{q֬54oj'jhMhG&Y&"_UG55M$Gp7qfZav2\P鑨zܨ.#2e̶m?=r+Wa9f^,]ܹ&~݌p+qa:w[,%䀢w9d{IoYN(yPZmKGmV36h=8Uok?q z~gŹOq~qzLoW{7ty#3FKz]zD ȅjG5yY]VJ{9|̹z>GКHڵhyЋEއ!XyP#URry\Jˏ p! z"h{nJ )EE×4\B{1I0@_X0hY}VуmX0/.'k7-oW68%:8yYe{/Ixz,X(Z)VtΔ E,K,—WX"26"N`["Rp%Jc _w-]jILW3!PK E,=)Xpegasus/HadiResultInfo.classmN0^BCJK6` +RT$@]UQ.}Nx q:0t?/8 ǁCg2 iRsЗ0XV]Z3B1ڨiE$9Qlg*$PsQe R:&g[RB,E:Й7N/Ǣѝue,bus8r6 }{4Ƶ PK E,=XY"Wpegasus/HadiUtils.classVsJB6Ѐlbv1M18YK+{miWV4MBZJۤihSR@SUJb2di~?Й3]EhwϽss]2BOipbVA9 3gU;/PE.UUUQ1a qzī|fqܳTp9oiM +-7 {j `SCvv󶃮| ~r""C3(p hNCX(X:jkMkSW"~i*P!!*Fqaԋ&FKnq (8)0&@\<nj8Keb >"h92'4(W6iA޵D'RKbG 1n~J%tq ^!zxXT:񔡏RQi T2qs_Ep,xB*'2DN񵈯Iy}= kx|>a[L+7k*bsT)$ b40q_-#n!?砈ggN< y4h|~L*'0k7頋)qG&ݤ`{3 Ϡ1@PV@%JJQ.(cD3=ƿG%hH8/!(^Fxn8F/`cxP<6!84ehGx {]lNViR~G)dWmK bCrD ]U%z:JJ`}\Cx{-tW\>6zﲊ߃->h;NZZ jg豃+&(8%E۩&WQҗc4a5 1**de51 _ RW%~PK E,=JdU'pegasus/JoinTablePegasus$MapPass1.classWwe~&L)[W(R*h,eAdNi:Ό3lwU~ߪ+wٳt9g??g|5i.yo@/NG&# Q0 E"(dcG OqܫbJtmUYNE}ylT񐊇U uCvN/]Sa¤Yu=o3Z9ۆ{%V۴}k:}mKC[=7aLyvz&u5d*LB>jgZZA jZN7+Տ=E*i$G',}-;Fb/M Ţm v} h uEsl:fLKʗF(x\/TtpY(2|dJɾ#H!JQ`<^"A0tک媹d my ^$Q`~?^Yk#Yf6k${ֹ0iY:+e+-] ~#_t.srb~n0F풛3U,vK&4tGC:5tSK^+ ^^W7kW,T&5[S*Լ[_xG6jx+@ÇXA]sE_<沼jTp5SN:Se*M+Kݑȝ>)E4|4|/9ꩥl})\v)0+ _ok ;K)'~y)p)[,xd%euI Kkzeun Gut38dֳ38tl&1T>U }R^G2T+/е,dՄe -]tWȑH,QJ#u"wVI6[{#FR֫bOԋRҖݝn@Alc [K,|qM /h 5+_W s&vR+@لNsٮ@:m|H` Bp}ŜCF} hPB=|x 2VtWRny"1t[YatԤHkf;X6DO!r2,Nyv/Q'`kև6D-a-$k#I W\i\|sO|L5|HsXrPG2 zOfvB1aDю59Rj\iL6Wqɛf7YƚL;2J1.㲌T`ER S6ƯWwԉ$DS浴6ZݙUܖsUp@w9X=E Qc;]ٓ'*K;֕$TSfl8'ޝb+oAa wO OPK E,=!1'pegasus/JoinTablePegasus$RedPass1.classWs]Jb@`HD T@j;` Qj0Ů E]Iy]9&Iɳϴy4iF0m3ә|LگL]Id&s~qϹ_w;q1-Gq+ Q!T%OFT Q 4, v :8H5j|T4İ ex9zEh I<= ķ"v <;jex Okx&FjxN+mUF,=N٦8R IG >2ea2ckZAiIMtE,1-y8`:x*b`' TNҤQYV9/piiXNr^˕i+gZE9K[yƌ,f2lv\mKWw)o/F9VQv!Zw BW.4!;Z4*rCi%EjVr=D\vE=$ 4=f΁قt|Ӷ< g<%O 鳷L;(95'ϺpC5;Us?3YhMlOeE56fW܂i1Eucx?/% /x?*fF1H$jAc>vWqS @8K( x2n@ A"_6jlD\MBpӶ+=_F5o2n 6hJ9kbʮ҂@)>JIs |K$,9r~{eY*x3Go<1(w*ǒ| 1wc4_{8p=Wի4?QFq@W7G!K]B~(O xGi(Y߿s<_+<]7#Ɔ> B|bÇڜWg{j9R#.iDgFq88(emnDb<ГdU:9qW'-y;PK E,=* pegasus/JoinTablePegasus.classWk`&dfG %+QQ0BJ)XKa3ZRVUk[R TM0-bB*־ߥm_jn6a3 {{y}{BlB'e|ʇbV g9ȿQ&}<_KIo eܪ+>|_"nq;| f] )ܣ^ ¶ToV (XQMv1G1Bvnc>EFAnzχ+ /'dbd@̈\ jcc_?W'Wqf o'$n_Ro)?*x ox]=3ƸnN T_6ڌ †wOSF`X_99lσUY^в$vօ+/v;z_ d;T7__Vh`Օ%Nq7JñȠf!٩#U85_5"8JRRC(+ѡlݲS"\ϭͦ'FhvcD5Ff[}vHb=)kuFv:l!LCW-ݲRIHIa78iE쒔i;<ȋS{bO[N(.oEnW%e[9FW> rjfp7wB2&o}}a)A%y\|Vy>R'C#* @FMAYQnF9=Nٳ(t(%!*!獕s\c2sAy6Ҥ:ng8FёT~e `g0a3| NU tccpi^LUep9's< 4 H.}%̞FVY* {[%ϗ8T [28}SC f}љyGEd,=^B <ux <.<؎a']xA>n>9roE0@D.7ė@S:~7m\-Iz #9hsUc_rݘsmNAXCA{{s328,ͳB 1I8`}k)*+ғAe:A?,~#|=b*$K95dpYv^;Dg#<{AŮ|Y:[Өi@#g?.+PPK E,=rM` 
pegasus/L1norm$MapStage1.classV[WUa )--PzIR Pjk ZUz03k.H?Ͼ@ko3IP˗s{}̟/ ucVG>:pW jXqu|O9r:s=`8 uEe/b3e Z2+/oZAY(Iҟˢ"\Sˆ_3=pI#gYҝ ϓ;*'-ݼg:M.7c2oՌpD&35Qm'cڙmU\M7Y~;JN,%خ/iŁ!2t!%1q d;D,CD)uښO>UŖԅU}״ vlJ 1pWFipp~:{CiAIrh")oP(8(-t|ӶDSV!XihJvL)rcp=J\o}?iwh 243]_iх4Be h4 0whf^:F.Nt݉EDnz| CсXqV=,-X_=G|Uۅ=J 7Р FBi4& B(pb~taKVPay\C ~.Qj5BAk $(xL$*7NZt2JrL0T&CČ٦Bzo&i/Q/;}cC$ڧfv/w;'UCwhM Ә qft'a_PK E,=F+' pegasus/L1norm.classWwU*ݤ:EBB JQ; #Nl,DttJڪj&q_fq}W];9號$JHIw߻~1a/ĈQ 1 O ki gp?gx@9<(نFq (& TLh؀JNJCrr^ [ʰJHNEpT*BtJŴT4kbZ1nӚu 5bיqҤlE el-J0mRgeإLwMt(c%jE,3&WxUW*o:6z %a3[]U>W1I[[0)2Fq* /"]6L0"}%Ӟ/db sNկT:\8OǞ0K! NVpRWZ٨v^`, Q..*ر5-T݂i)XFGܪnVйԐt ҫ ӿ\1UVW}^OKGpGT[Wc8`˲`Ȱ^VУ_OOϙGɣɜt[S$OFe29laR:lrY5pegNe$1x xoh:›:;*>1{U|c|`A!p {;(izIϸ tX_> y"*h퉓}ʃsy2MA縨 "u||̩;zT%\JFŷ::~KNG]7 ?⢂rњ~(Wy{g#@랁 rր%A#8ȑJ6_Rc簆{᭳H,l$Ћ6f>M.~;HQFN⤽@whjjXK: y"Y$x6#GnN*-곻~ u.͡ylWl H-9l` m5l]C;I&~jL:feP^CՃx [GwKz,]dzjJfb2Xw%ħюqs A0xpT# !;\܅~G(=qEϒ@H,z-Kq<ېtvY $7pSHR!+?Yt..4ɞ2o@ע ALӘbͦ#AaL` R]TOS[ȫ~GQ'O~iOh?PK E,=6MRX #pegasus/L1normBlock$MapStage1.classVwUݼn2iKXZJS *L&ttf]usaʝ+wi 89q1w{w{ǯ A| 7؆qPp S .㊂.LqU[5w.Ǭ784('!8n*T089嚥/2*+2F FiȸU %e\5*YVCop5{ӪbA-pk )٢u7_0-g߻_źe4udKSkωV,;َZq,Hy̰okeR +_U;`ˢM5-M=[=D-3ڂXf3VSvmBilrf;MfdO)QuzdSżkYOyaV%m[~Ng]#%3On}B@ǫj^,eF#R4-;air]ixQU T> ]QX09 ]؞a.Eј2Ug ><(<}j{ƚ̀lesᷧR>/J]470,zxZdCm5*SVCn#QoœAm r~!#m_n"IBXf#qxh&WeX $|\J MqOb_6)=H PK E,=spegasus/L1normBlock.classW[~dT%!&$diŘ8.,qI0IFhʄ-,ekwm׵ݽۚ-ڮ1^Y_dO2A?|?{[G0Ղf<&^Wd< ) S > ] r-|c0̢ sqUN+P(hT %n ؔGE5\a\I|&d|VA<C_ g>'ecqruQwM#9RGJFA*^zviEݴ%IL\կiK l5BLw do 9aU썜Q*%R/xtַ.Eʆzx,tIqBZ/9#=Re,9NrδW6$n~N;rB3y5سf!ˉMnrrVKLgagf:hKFԽ mM:7g$6~ zUW‘"hE~ʐBm#^W}@BwXsf:=/.qqcyCoঊ*#m=M?QSL€TЩlvJe=eaڐQ~ vA 2}>"a:]1J\xEt!1ZП*~7%**~7UdFo;om ǻN(Zbk׌<iϜjm 6UU,`?঄BzdרxK2ȯf. elxƩ_EVXRqGIşU7]?z˗\% C%RFb= 3j'b}VJW 3^XdXX Y}_l )6.p3:A)E7k Aim֢{S ̱􋺝D.kUiDpb% n7PFRɰT:Twr(E;N4вA&a7$_`)V+ L 9oXF>ݿy2 fޟn`㚻c[I;IJػՊ_͉?Ur/ wF<ѣ!^ iUP;H0Bm8}<(iHqf>~>B䌬ͲN;7U\LƟ"ڂE(M޼5 (.Vj9K(8sx|xN&|4)x5)dkmD& Q)!!4a qB;\Ķ%4bkhY+w#0rr8smu6K8]pRv%~O*a ۛn'5Ȁ\O;R;ހ\Î[K9YDh+S.FB—Fgv۫sGdaBz}*<&5 E#G|{.pYeAh4@QhG<宀"Ly?'+P^ x cx,ORd(^IivT RBN _мCceЉ仈TAߗGHYEcH3Nl SjHx]E8zkZPpegasus/MatvecUtils.classuTRQ=7̄!D$<"P)-:$S02fR `qƭZ~ _p׹XJ}to8k:p$::NऎS89aLGGbBsRLj8aJ@j7)MgʬS5V\j[5ƪMOT l[ b\3F~6 ϲUr3S(qzŻƦZVmm|!R`s_@m65Ԇ疥7K}UV}ߪz_bfUU0klrJXgT6Hàlܬ4RXȄՈ@hڞi4݊9g!m2Q6tM` \ ̃<%\&p3MˮnE,&e]*)k,%?\? uH~2[w\umVviQ Mښ\4TH5uVȅ ǾtBSE.J[)Qi$:-( (@03#oQ!Q(ޑffvnfn !Z80$b tkRt<=7ų!cf,S>q3ٮ_"&㪩 KqELX|3S},ŜCm=_& \ f}27icl?JaM;"et?A*vy]_@y} PK E,=obSa pegasus/NormalizeVector.classVYSW.3L7m"MhaFIŨ"A0eh&f3b}3٬%@?T*O_IU*v0Z~w{nqEA}%Mh$ *(Fc #a%V`A_BkMVGNvtg7ʸgTS)nRf ,Y*Ev+KIBsGd!Hw^T`eCrd}zYM+rhQcc^9˽pw\-%' )^a[%XSOtaMVkœ=hTfBD #a'e&`3JD .ZF} ڱ4~9 0:94uPKIhD9E^E k=]<ԗ?ao6[ []i`OXQNc%(- Q>C&Ra>EZTEq+$p]iz<5wfPCq,  9T!זUMԩ-C;i#4bopOKj[Gj:LP<%<{w5K ?W(^hFϡv('DeA0뱞vCǚ/>j] SMcB5͟'ȏSAY PX5!ߊC8#HqGI23' : !9Ḩf(F; ;X%uQ4iEt(@nݤ{W(.b#nLY@6^GPK E,=u %pegasus/PagerankBlock$MapStage1.classVSG5;;CX>Ԭ&Q .AW1,ILlggff-䒜s-K^D,ORJlT.ݯ{}'Oяu踒U3M_p]ǧCCQgR!pKm]ZrE}\CYÂ/)˱iDvhAud9vAE b^%-XC-C^c Fq?k $(yƃzCr;k3kھQ S<"+^r%ש-V(=Uz'"Wg]fќ/T$#6P1$rN 9k7Bk,Co:æɧ,*6wjr[Nw/Nxdwy[Ŷ=SiEaQD5,jfЯ - 4,Qey0Rq*c}twӴIɘ\rlZ7 buwݺoV4vM2 ਁ/ =; |N@7[i3d#*LU{_Lݲ7j4Z̛8aSL{$Sԓ!7%2iBv(nA0XB pU4|϶h֝θI$@qGE٢|#bg} ٤KLSe ,c~ 10S63ړ Dt C1qVxv&e6Ax?71${ (?CyLduRͨOJ&匚 tnB2CH7ZKZ[ZpwFMJU]WR cHӇ) J%1I~tߍf $5@z,q2"8CQK,Id)Gy9U\Ζ?OyP^G 5e/&! 
}:A,Il&ܹg~}$4_Mmir$OS#i>B~Aa E4ޥ]'Y:PK E,=@>%Cro %pegasus/PagerankBlock$MapStage2.classV[OA^,+KX\)^)"hxf(nv>wPIO>gZ3g9sw.s_ G*q^EdU Ⲋ\« a#*vẂQ71 " {V0(-wϘin>!+̨?gx wyQdq=O-"J~.M0|Dkێnzζ]×*b]W%)#iohN v}fe*"9CG b,QBZH$ ݵeHT`6CT<=Or"MnVQZO[/ϝ7jam6D Kmy ƩF~%W4ORəg;^tF^?II\2rgASbT MƆ#MKnAxm#ћ@ Oٲ:$E$UP}I$qn T0%~U CPK E,=OW &pegasus/PagerankBlock$MapStage25.classVsU&M B )᫄@ R@b&MMwͦJoo|MUf|pGteݦ--6~{{@>Q )x= NLw#gp}NlҲ(B(C&^9F_ 0G]B&89F8cIzc` tCw3mfV0,J8ZL Jmhn弼wrD-'v2!춼V([DN+ c$`k@nH3C(czhi璚eDO˚,[dtI6K~3-+LpKn&S;et[vʖ:Ut|^dӞCLAPY+/mTZb*^yIxU]**۴xGŻx!6 Y&}CR]/#AE[9>&=) yN̠9F7[2j3ӟ1zrn(?"'m3cM"^z{ |HdؔG$5 ;tDh0T+BV&\k;K K.-9=Ų@#YPLʧ#^4O3n'!~R-#>Wxfy3`ZʈjN\ Y]Әh^c`iPV?Zu\a6G@"3`m%%%Ԕ`B: kn\[ْ5b JɓMK*n.aa{р_Z9|MeWзGx8pw1$Gs4^Gi=`$ C z&/#˕1DAyg7>QрS\ b7apu"؏m6v(ҙIG;݀br2J^4؇JJ=2րS4sp(:ZIGעJ !{9#xT)7gG( Gi$3&ÉMװb55F)+c\v0:q8щatNN!1$\&ƀLjGPK E,=e&pegasus/PagerankBlock$PrCounters.classRoP=v+!6 Lu~,hH"Jb`bj fu$BF}?ιTdu䰦!yi QuJeź sfs]|Q7_2軾b`}c[7]f..>O]T\IMŐ}7Mwɾ/LPM0p:w\h!S#|cEV p'6I9U2wW '. =t|Ĥe"Z!.^b[\oyo29V! H %Ջx p2V&9ڒqy 3 jz%_ր&a}p*~xJLnGjKRXQDE yя"r ylR} MDh|jfNJ@d pqVkX 2d%E }V\O`?p iÚ&3%iyV~@}ԔDSERVxgpIOYaPK E,==R6 |%pegasus/PagerankBlock$RedStage1.classWyxTW̄73yI&e,i8ZMMh(T/ y7o`mi]Kk J(ťUkZq_WY /W$w==KOPWA1o>,a|xȅc<{1<ʛq1.<Η?YA||Zgg\8).|/xG% =\^CH8+) EQ#ce0Ì5zL18*FPҤǴ-ɾNܦvFidhj|YtZ=zݪuYjD[ b1lF[ ZDM$o] ܉z:6bz$i%M q5ܣz.ÈԸun0:t && i vu#YM*7j i@"aO,%iœVj)[aCޝiq,ai{,nc!="6c enjQD"wLsL:fh~͔TPE4+*"Ẅ́L(@tbr+[tDSyi9CǵXCPdέ My+m) ,@E5E$ }9&&Rrw_a"i{7RܝHv&2vJ /֛CX8&B H"q n"Q35[(*fpPB߫ sQTtfj{K0!q&-~p:ƙ<%ӁsZQiWh PI' =Dvw/!F#QkgIBI'eW}T!ӿ?C?QԮYzxQX} p-3bV_ʂob]p7DTW Rp $ aH;s>RSJ(fxBn*2:\/N$%$΢$Ĕłؓ-?R3×BYPVd_y Ws>rEvPQYCtbQ:QG**VCGN(殒18Oި𦠷F<8 ]RA4 HR$3^U3Rm7EAoqL] a{w~~\)pG_)-~J JyA,ou54*COR#f,oRvBD0q_Rj[C7Qm)ɇPJ|yDсN0݇0C=(zG $80L A%/a^.a/+w9[,Cw8ለW>@2YM<[x(''фfBggIf}J\x,f#F7LڈKm#[8ĨfvLfI$GVj Y/٪n,`*N#-df(iAXd x'YKvzy#6L5fF%"8{Z-O7"BZqp37iWS$)Krc$ !Pn`J4 cZ v ^!} $gyz MUI/jG0 go sn7)~ӈtRﭮ9 ] W_פ sWpP{q((43(4KʨxGxz0 PK E,=z%pegasus/PagerankBlock$RedStage2.classXktTN&{3s $Bx D I@x4\&LwзjO >B ڢZն־۵kI߹3e>ソ9wμ>Ԣ CK)>b}>bC>LT,-Ƨq0=*R|V=Wy|/KR|YW|$Iu÷KbLŷt=r*'AݦlJ87ڦ#]kަwcƃM:]7i'vx܈z2i$R7*,M#74:fWi|쓲O ACOm`¸oܛIVrg7ҡ+9^Hwy_,|r6k`1$k<j/yɩf. +gylk;SniuM\޸6=գn{4S5]6G^zwZS*)ɮ3}75i XNQ/L=IDd`A4Knt"1~-෦Gg -ɖK2wy Q.gր@1ң(#EaMQ``7܇bA)x cU밊@=eW͜2oP⁆ Ф15GP(ےH攕 F r c|}_V}p( f!Kx#ίc.,xYkj ?tó^HP4{R )% lǘR-m+zkn?& J@~ԇԀ䱩(zxL9+CE_Q 7U!_UT-k!@ `-~ SpUPEjf@ ˚l͢ ODDVRIቴ YMQ5 !uu׼9'0iK@= P$Ӂ,zY"?9ti㘳?/wo py#oCnʝ(r?0q88ʘ>};ΈGN"Љ';$v) wA<sxg&*xQLb^AuxUlk6.L>N|XxÍKhZ CXf뱁)҉ 6b|Ss&32"Vl)Fz+k]P*q3 0p ne+6!L-ZS8uZ͠kϡeGs XmGQVokRbhTAL((?   ˮYoc9Tm. 
gM],XECf)̟;/^Hf{v_K$q;^;C!k 9.{vXɲ !;?]?PK E,=ޫ"pegasus/PagerankBlock.classX |U޼\E@$/@ ;$1 "ƛ&y6¢n[+uZi:䑊jJ[mgejӎӎ3mV۱2{Mu&_-;;{̥}g1ߔ[|[7;>2G${*/Ҽ-zя4?ԇ83?ǿs L' wapOoUE-'R;?oT#a cl>|DRɭW*bΓA1*Uid>*:VxT:_q*WiBDUR/I*R#*M) M:M N>*iTh*f *TADxMHO$!hָsqQh["I#KڢFhIvōDW4NZddLOvƭ;eTuh#Q vmh-HXxGfM%c=*9S֓a #a}=ɌB6=Wm*G:`M"poDgA {'A w1gK6°hI ċq{`ru0Ϥܾ v- %wyF  FӰf5dF==IvYϘ1SON6<}=a=>.2uSd94>T(xj3bGBxc+3fԠzdy[0-a=ƠV5"f^mepN7̩u&Cn5"N0Ô.Y#8۶stP*TC#ؙ0,υשsms8ru<…嵵dQ'FK,.IBvx!aq'dfi pk[~B٠XIh{dZY^PyLT C8Ai>%9| $U5&.Lʙ SAiFWP#aO&lZ_G%]V]#xA!C5kEAjB/C m= &[g|N?gJLPɨHt͚5K(1FC⭝0aCI Tm0MﴰZüWcG4֓/B`i ~=.j~Մ2^(AIRjFhF V3 F7JL3L$^ |'5T=uc..s|9f 9nE/ spee-Z5?|3v^)|1` v ].9j^C X`?m sͲcsot *}c1#03À)^ɨ5ʼn]>93C_^;Բi$k[ty;w$yGb M# ̸ϩm} }WQ Q&[ 4q k.oX׸沺5J.yWG3Vjb4I rA*yPQ`LhN۔' vtBݵK!r(_8ڢѬ!2,5zJrJ䒁򤖟a\knF'*wBszsr|i`J!I%~˒SI5ZqP@^%T>hRQGҼ۝ś/?d2iU roXeȆTYOlOXs5nF=2T0_m1 cq#73G~Y06=`Ӄ6Ǧ>[פpؤ![6=bRK[Djy3_WCLoemL[t+=L2ݷl%Jg-`R2}kf1:.pEU,鸎/thKʸ_:VMҰc5hhXeH[%b5[ ʖ#nVʫ6InkܷԾ#˦0or"yC0%̓@L 6}l(ܚI;VS {f]3-,Ncݷ [6Y[2JE~}Q7[k%׶د\KU>b1Xݢ| @J"a,Cs:Ħ ޗSM; "}iA\0 ߋRڏz=#{)mR5eЯmՄ'- 4Pyp ڋwf=2!;o 43jUgRVDUdiM h5v peȉ:}L46Pψ?!6Fr.Y؁MfAj;؆MgPvdBmc" pidP aa+yRnBS(>!W0q> s>MkpOwYeMS`bPy01{!LUJGĩdTWhfS N?^N]=|OB}r:3 Q#u01QtøwPK E,=Y{P *pegasus/PagerankInitVector$RedStage1.classW[wUNftRPJ. R-bBP48EwQ.*Ad-O<3>ߙtZ̙g_'M%BX~9$ ϭAc0`(8DeЂHL9Xaa=χ +3a` c ^ V/ /KO\Niuഀjfuϴh(7L5Tߨn$\.]Pq\)L{\0MA&VtXN2ZbHie +nS:7_W: S?;(G+z5ǐE;ds[q^pMSwRZ:-éΤ];yzk9 ۈ#A#qhvuav^M_3^pS|A/o`3BPK E,=  pegasus/PagerankInitVector.classW |S&齹3TDCm) bVdܦ4ɒ7usN s>榨 n{s~=ԩ$MiZh99{9`э~a5W}2\+U\Ï*>sC!GQ }tc Y[d[5|RܦvܡS԰Kviw3p{Uܧ8\:B_~^԰GC>c/ F*q4}xBT|Q:/ *˯oxJ2|SŷT|[׊fZNlgQkL Uۙ=1!M=li+2v"ʩ}n'RJB F6.zRL.&bfbNdiO6vqO 1_ʬxfdh e;׵dK#]\MؖP0\2A ϯH۞v$Vw,cDөDT B 63T9+bp)h9Z/F.#'Wbqkeɺg Zk42XtgM+GM]*5U\53C5,TŭUӛrk[7-6ur#2+ܬV:fsfNK'a3+^BUeZ3ᥧ`QxM*`4Nt)?sB I1{|Ef%Z,.2m%0j<&bRJƣX%KeNMsjܲ;[3TRF 9ĕ. ˞*)F`lzy v[i^Hx<n׌y$;̞w|WocL9[k&kW\U%H^EДt +6i 2;0YWFim:DN~OC!ϦH~!`=Wbu7ɼUDԗw1_^ݼ/ς|+/8>֒/B}^2:Uً1xճjw/=$yDZMh…8+ \H.Q߃5>:u37bL~Fq$;:^l iQpBjVt8! QlQj<*u\ IS܍r-ULJwcs  /Q9UP4y)Gvgrώ~'aR'5aQ4K>wGV侉Z*) m-4j 0-U0+vxĎ2Gq8ڬ܅'qƽL$m'f02pY-yN.1Dr7%4s6Dzx)J-ȰiKYOt%<0N:*m|Rvx'Rn)q 8e)e ]eUX/<=Y6&NZX>L>ΥOor@'؊`(*qI.'(B`/yTcr:^Qъkqb圗b#R0LdgJAYbXWSbܝ-:g2S0DVA0¹%ZiY:/]!kop=8ʠp%X;s(rfpsjbO}G7m-Q8qo*g9 >!2`YvmG2|$%gص0 Ý ņYfcxҡ`b b, VиN"7όJ{ xsvۏd0 *eWpU5CȪaXui TpKVpG1a2Hg!aYNY$yr_FychxȝK"à[ps&gێnz޶JO]×&bS_We)94Eѷ],cp2EU*7˔]R Cv q*vTpy~/qVI/aHٺ+ίZ5cR.)K T0J.`רoe]ɿ޺h$5䒑E;;G:%Eq{شsNG v-1#W[MLiq֠྆3tXC 1CZZp(@NwdWO4ݻCO=kE2Qⴋfj(xF*$׎A4N\:iDC'MnVkt[(M* s$m Cyу^&#8ù2MH5C`C..Io D1{K_g=o2@#9Am?J^Ps39Nzb+5[ܴr^^#8ɡ}AL,彋(NYjLMk5~m-o#i-S4  ~$;xghfb1$l3;rFbj d6([~DUuXoC6^cqs"EG1xB(t?q OC ]TJ}8/- U]9/ R$qCHq #ORsv^&9:T\!Jwn{kU$e>{l'CCWvl3Xqb~.!70|N{HDPK E,=F%pegasus/PagerankNaive$MapStage4.classWSWݼ6Y !U[AiŢXZ7ɲ,$ Bo/`Gc[g:~Lqz&dZ|8{Ϟ;{N/؍El"|p,'\DCDY(a$"*`ZD&:l9E0D")9Xa|u8)pss5y  ^Wj,#?YeR-#dSL+i9kfJ3Rús9SByM4-67 2f;3M"ts@5g:$s F4]9Qȧk+f9wZ4/R9.c*{a]Wl ]*;u-Y=!ksʶ91)M-Xq{ɼlZJ6uHd?1Y!LtpK1EoJEƕylig唌cX<(VɃYeaksr@xJE,C*:+G1M^3ɺs,MWI &.fjTvVghZ^` bU{s`R*(#t4CFT]vܺ*cE$hz {߹ʆ.NvsRN)BF9,۞1Y3 O3 VF9-wY! mk) %tSB.N7𦀷$Kޑ.cX.h,o fV.m$8rO[iи񑀏%|Oil#3\!>"s|!K|*k [kFyK؏>11m:+RF0DWFz{s1ZiZ>h ӎ?[fWQpScʖ,^c^%O >3#OC(re [hM{]c1h O<1&U\!ڎl9:G6su9"v!mÞtNCr'SW Yrq";8G-]嵫R/  y(Dw. ?qM&X|god 9 q"$@I"ڂ0X fVͬ;^*i.쥕A+[BX}Lv"E?IP2yuU2#jo`EEb %>D|Eė5zԲZX:YT]Ź0!cP,!%DS{h 6cpk6jvHx #|i%*E~\f_7M 3㚛 2(l#lfrhk0Bl:s⌊~p \WU _0r3((! )xd'ȿKvhmȵ-+mP)qd+E1qCOse8z_4κ9UvHEPK E,=!%&pegasus/PagerankNaive$PrCounters.classRoP=v!6ɘRC`E(y`0r[FQ:!#s_?xb*6td!9iuJeŖ Vު|V}g=Z/}·'Y. 
lr3vAawmOu ?HA˶^W ogH͜9@mŚ#zk+v)L s>n{Y~-9rī ޳e'/;w3[?:4]tLw~4WX.5כO29Q- ,Hm x p*Ff %:n_P+9sLz}`w|چagµ*MQJ"NH=@bUNFq\F KeG?)/J0gR} ˣD7̝B*Y װeJ"->A?p iÚ&3%iy~@}ԔĎWEQVxgpIOYaTPK E,=<%pegasus/PagerankNaive$RedStage1.classW{~O6ٝLlR[M⒠XB"m4Jh0xI'fffM/7(ն^RV{?)VMȆIS99|396XQ–C^FRU ^* 3T0V~9<rlϩêţSp\E"x\$"J-ɝ<#g<'ɞW/DJC %*ΰeg W@ DIC G2p60-)j<(PMF}eEGr܉i=7;|.oV{&DOkYӝ]W/oduqGӶuJg;ٔӣFjTv>5#&e3t4e?$۴Sw1=RWQ}BO<3hg;]+/xmrFziFv(7>#Þ1I\}9ʦ<Ւ@ΠИ1_+0@r>231$S g\9ӥy#+X|SlkG02 wNJ5"*ei] v̋*e ۫}xs(xYQpB":6i[WNf=~Ҙ}`-~&!?r?­RYe]r*[wS,';ऍ=*% nRآCw4|pW ?ďXOSi f.#os]i~kINNZf[ZuUMsആ"yT"J&fb;,j R9Jyw!n[4} kq{{8&^m30 TD46 h8yS~o㘥&c8Ưc\~2K胋0Az?x YOr_N=!h OaZW|b :ًOI^n'R0_ܢం;U"ؼRp{ +4L\/#HYW4.rZ }˥ P"GWe0u8qI}l%qtqVy֊oPK E,=z %pegasus/PagerankNaive$RedStage2.classWw֒zr<@(%8D6ةu-m]2N --)iR44!Qb9?JVD~r9s;|3sgV6 Q$?DdD b tPA6(r24 )FK|O lA8 sPB,?y>__Zob^xxM%xS[2~+A5 c#4H$nj $SP̴5LY)팞td\Z’TV3dyvun+7L}Ӹ[q%o^6ļwî7ъ>DNsaќ}Z8:'ܤ5rsܰ NylvOg*&oz7JEƢaqӬϯ+lM2~'AHyװLG=ldL/G]dGgAaOĆُ\ld wK2C^]خ9UQQ,Kw)VN^)UJ 7؈M*bhWсN 2~xGƻ*2ޓqTş X\ZJ92Ex9F:"3{7#rsc2&UH)M8-mN!;Ψβ G'*s2Ϋd\Tq eL W$D _n[IX$uvIh1%a`wWudt5ћ˺j g9f;:Z7zP9>F-ўZ#6&&Y:<27v|AךhU5WW9X7KjS"{7RqO 6!0┭mg4&U%\,qh#0GD=R9VYgSR9iH z7x7ZB\`#afj 4t;ʰ_QHXZ`مc1^7x~ʳJ~.A.&p>AԊ9J-GP݀Ќhewm18J،kD=,:=xzuII+0=No֞G ]’pcMBpXn)~/+䋸1CɟN'3òe ۂBŘ"m L6tShn2%b]QJBV5:'ԦЬ[ +x M<8Qa=:ۯ`žiŁKLa `k sw)@{q2;{cB8MU8`ϡ.`tgzgNzKAUL %NrtGiH>zI0 ;hgY 89P99H][P}oӯcEzO6k/9}Ifx#X5Fﯹ{J$!<Ǽ$'<$Ǔ&o07P|`PK E,=h%pegasus/PagerankNaive$RedStage3.classWsUݦdͦҖ-ZBi"mMMvfS[_o!?GȌWuFsom%m3ps<Ock">ND=8 ^tVFsCE ɀ 1>qH02"e`qFZˆL0&A 1(F:ѯY}R Gu#eT#j&"60ܨ\PJ :=j83{UKɊ[jL[K CZj*,Hj15Nw>T Bci4. vV,&ȠT 'ԤE;V:q:~3&itfݰZ]jRGpvڦ2Lۭfy:˜oZ}b\.c|޷i)`3N:'T^*.5)|mD(9m4%m4RQPzڢw5WjDpX#[qՈsFWnuq5P4BJuh[T C-ױd3mEݓzJX` *Ee'<%VpHxVsxaxT{li=cAP3 TKtkQne95$ ^&]7oᨂc8N8pϷ*8wjfX'.ꧨD)j0Kx9* oWcM.UxoYq-9Σj1nەTFKbz;e87Hʯ MKZ{$ PɤfDJr)W$0>vt.̲\e Rn@iPMujX璘3C,&*tC:C+'Ȥ@dKa}{"T R0yL G=FXNiN2΁gQ8 'g_D$\"!(YY4`% a}H 5gew>&Rv$sCϾsP *8?vv`ߟ< C&+,ķfuJ&I3NL_ՐAz<cM1\D ͞*Oy$fih:z -]~T75\@`_y N2rYgDF#@U3"+nWWA)=@٩Dpg .XOLnA hbb#6℣;:GQ%R%ױh^U:Mv KCB;{!exU"}foDt,(3KjM)7)$]=Xpҋ.^ܙsҋ^';r.1Fap PK E,=ÕZ %pegasus/PagerankNaive$RedStage4.classVISAdL\pE .A\pFҋ6CW 3SP^*˳gK}=Q(S!~λ/L9cq"N*rQ!IS:8GLڎNbmBj]-ۑ3ʂolp98'eǑtM䭞'"&YmZ/#=.V\j OXK\뙶ko*fP 3 YaV+clzQ8j\TV_AQk,n8VHyߍ!&lhйi;Mg/1X7@fI/]'q5oWF>}J<ᴮ ՄS5g2sLFKpsLQE: "hYf`o*27F%/VE1` めi\4p y6pWeY5Q1P@\# JvčUfufZ?#ap⏎b_ЪWkXVTL:˪˄WB¡xj̹]L7ɜ_І-L3jb+F]T؉2:Dz%OłFxs'2t!`"ǡH-_ ,ۯ a!=4^8 .!pw(P>ts4myՅCM^9̲ Gb\}m=ϹrϮnwS`8<Gx`b>U?8~PK E,==:sj(pegasus/PagerankNaive.classY |T7{< 2HH@C$&K20/ L(.U\j]ZVkݫ_mJyd}s׳ϹPA_{yEN)\RPU4Mik &Ew)z|9fTf#) TA9TC>y\k~_&hFGyh hУRC 4*Z&r#Ԩ\FGi4ZhFc4:FDb6C4AjiM]4CSiF5͐3lȊҞ@i,E4MXZuҹD-xktJ+>2X'xddRzSB!h~R, mәh 0#G<)P5 pVwɊ3c-̠(0ƎHl،uFT슫t=<#$LvG,٥^i ῐ?/N&88 }2ӓÔCD8qcG e$rc DIEb@ɋ)xN𜅙0fSqJ5Si5dhI%ú*-eÙ-ssL&缣KK7hL*4Ml SI/6HHGu܋t/Rl}.c, f {laa;u<"ţتcRZ/2HwQ F[t@tN[v:MMdɾt++sk/3dJ㫹5e6^mpEb+W}fJwtAw=:^ *kGU{RmN)t_z$:=@tNrvWLO:=D\bN;1ݎu܃I3iҤf̚V7ͷHGr"7|bhsYY$mF䫊%&.f`Gdee/s~ڽ :eI5=:~tE)^:?J846=zӛNO3r?Ȓ̉[igtg c*,}si&;(P}9Rs:=O쵱N8:0)Ɓ@IywBɐnf=jGh\{| J/2)/𥵯M^jM5loBO2&7cS u ױ?ceg2Kgt^*gAXɁ:C"dupգGk#0">c-9@fa\F<FɎ$xyW$9]UenPxҎ/ӡ̧S~)5_ɮ}~W:}A_'=&^ q\tJd8x^e~&TRgd=R:\yQxPN#+̭~ sHtx\}K. r~udq &wm3(KsX+be./q;4!Ǭ ]<}Uqp|gw;nȋYe% Y) /,+(o6` |Gqbd}Jri^4Z|s^SU ي<98q#r̹^O633,a9]ܻ䭖޳$'~Dv0WR}aHTo1^DsOңC<旷orJrgLx @z 9,5޽jagͨ=jLxC}v$'9h犻]&#h$8A}IzS?C0Xak ɥwK8Ѱcj)]Xz4F"E dʇ"焢d.3i1]+}L8Bcf,Ҥ*`rY/W#/x4(){hDYVڷN~fӭ=2qu]hpS?ۭ咸х޹|5.΍ms [O?{bI"s+[7^lvxc+a-걾L= _,k! 
:aYbm2}Ka%!fÉx`~_OE&ߦtMA>d4#6}nLcܺmRE N)\^U۠BR.]VwԹ ,<.bÀpxQ%DBSPASq,M$jn^[j QL 8q 5a:OXJ-XIiz 4Bja%8z@FS!E}lDp@c|BPm]Dt]½j+i ]Esួt=J7[t3O7tO[t2Tje%ݫ}JWٴplKx.s7g^*{(R@TĖ9@i׹`KN%J>Wp]`7kCyߣoGbRSl^\srދs3 A|~a4n'|lToz{ş\9Ope_KۻkG.ލ&̘ ZʚhaL[7|xVyW!؆PRf^፤b&²b.,؊ֲ^J|+<dìsXSA3QȚGo`6;w12z}(}5'Op&}sSGa#}a:g6;eo,cŸ|5N`_y;+av߸0A>L]Pc},Й=v6SaՐ|nY.^^mX;̻.+MdeS+H'b;m1oֺS8gpS8$oSjOm;- -LaO>? \FX:35z_y 4gZ.p-[GEBYמaOӵ)ʳ ǬCzb K,6 N4N4@sxECY [n(ݱF)J!.Up 7+q' 3]?2  \сS&'g' _z$4A1H h`2 (eجe$*/ԍ9*`$(IcޫM1G=r cQb1Bc4lzNx=.1+92SPb3ʱ(UCRyD*BFe*4Tq A4Ĥl7djJ7`(F'WNOH 'km)BBRtS5#nbz-[^V1&[̓ngz6;Nw1{jPK E,=:  $pegasus/PagerankPrep$MapStage1.classV[WUL2 54@oZh[M Ah'i 3 ^o.A]Ԯ'\T]3 ˇ}}'Cg&B1WeLa:&aVŰ nhBox!%fUBZF+2Brd IK(H(2+yhܱ K2/k 3b= Q#4inͨZU-M|WΒf3TsQs|AI:F msRuLS풝$I'jhwhEORKY%5kf&nQ0}FdJ+ x9֌Dss>C&3|RU#M87 VA1t5!pW-(;wH'iH⍃eYUT`C*Pi,:SŲzWMT=sd A,jY [ k(.aԝFv`j*t n: >tq+\Ğ82XAj219;cRg26L^/Oqnide Mx]ȂTЁ#L+$ $U6VnX#%WX;;+v ]} (!袒W>&9|J{7g Ѻ|= +_25 NOW Gv~6+Cd{:yKDxfdsCm=ǩ/vza%Z6O6/?ɭl3DoKm$cp(ڮSkYh㳱Ncl@̓a _Ej ciRzmS^zO|{>FP3y[-R?U8FrJ?Nd4:XW5툣$b8E3QBO\xWg>4PKL~DS{Pػe FjxFRJ 0VNo0 /#-Il.e0 > |/ ]!m:<@ۼX^G/8@PW&Z<.y /+8b`bĥwFoga[{f /gٷo}+nS5-J20lIޑJUNp/O4*&9 ꑑ9s*\%Z\Ì=(p& =H2͜`؞ q8;)m@ 3NCtX2] j0_GQsy):!:5cfBE`\1Kn'ҦOji-gVR7)uWI_d ɔK\њ,,V7 \rXXM{db٢=1bqmZЌ|2ں3b!Be/|:78ڷ!bWbfط` L_a[q' 3Qr۴"--!DA1[Ncع?A#b9M>-CҪW%[Qfg.iwir|asP5\4Jc4 -ڔ?_/og-ޓĊ N^#Y}++I$ݒemcTaoio:S[WɘE;+կeTf>M 9b! #||3| ܠ)U- lUR%nn3Z;Ez!'enJQ!oڷtT|U$q_[/E&[ 3CqdBOke*2_>m~}qrNJTَ߫мOh{C~pF|"3)-[c_^d-᎖ߔ ,K/uJSĹ͎}@ql| 7oN_Oku, Jwh }%ie~M Z=U؏2dPwo5 z ÏhM!%S<%D-A8kMy28j 5psx<*!JؾeǗQO:Jرv)aW b-JAa(h%B}x›8aA).ٞ"t1ŜqxFjEa*MҲqy>-\_9aSfZ`2iw/vq fz@dI;4'MSpv^gL /'2 jVF ۖgxiXvwL8#0JLdYtaUpS=|ZJ:>jReMc?3㯸PaI 17i*(7"};q`DEsLɷ3 W`\n]^s$XbvՑZյJZ_ "-plPVɰ,T BRPe36-Ā`|V)L;S[bumUMf(lfjߩXb+) Y-zU¼*Uiba_8-!u7 ָb$%~p^KuХ`JS#ػ'muU!@LppXn~"V,Uت{`PVysq^Gx'Ivi^_ /;5oJ>*)1)«GTԶ;nK"b{kWa( h#`{ĕhM(pH ne_hCd %v3h\ڌx"(㋉6A,Dcf7DPj{4̣Sp_e|+e|52; hŋH%+߫ ڧʅcV|ڛчq[LuM̒G/Ӳ Ēx5HdPe 8QS9֯fGۯZIÅo`_iBV -$~"/&d`,{N&cҊ* nb);#R$L&,B[Dszd\?Qd4!,"$Rڔ*=+MLRm%(eݥeo ?b3HT~6IlAI ş-Nd6ঢ়IK`1%@I;P" 籠8a`PK E,=8/ 'pegasus/PegasusUtils$MapHistogram.classVsEua WP@`IX.ABI 8l&,3x xPXZ lX`׳K.'ZX·=_{GpQB="V'P-Qzq 2DG|D0! 1h!apbrpL2X._xBDFG<%i0F~8Z٧ A4,/Q-ܮKb(CefJZ'N(!w4#Sr{512LZ-bU켝:P:npeL_-R[h7lJ)55f.5yLd4 (qz)Tid[ýjVG;-ǴTs娖AH'˽c*ǕTciFRGyG8y9|.RdNL(:PC6$!̙T ~Ͳތ燍^5gU l4wȴnpIn3^(!*M׶{F 8!ऀg#5hA.-k([_uYNbJMvv>#!MlfuvNRl`Ly ywe'uy+x/&(Xu2Zx^ xQ)/eqF+2^k f]yMGvjύO4Dmc|E:rĝޔwqN𡌏p!>_PW3f 9:}2UIflpdu֖W? +) DӋǀZ$2,qY<=3/?D*rmLםh#K-.XZGгb1٠B1f݊=d/鞌ڎJatqHmW%Z=p誑uEpbI1+O=KVѳ1*LT3 G}&o'|4*c`I*k]&Vj."ac/" ~-zRPRwd]_@Gʺzq=c" WQQȢW@eOZ>S,gvsAUJ m:u.-1aa*hM/ "K X5(n |`F1&P\rL$ۄ/&Y%p*6cYOTmDnXe7.J_"h5y 񿥔|GG=nlW)utaC忍w=W6fA=ht lf$ !8JVjYaDL븟ty L x@^b{0 ^Wx$8Ń|V"T HKXՋ #|CVBCrVlC<,Y/9)b`= nBR5sK5n9PM MqMW9Dvq#d)Ɵ˛vF#=JfFTrMcYŲV=\ZZ7\'K.~}8n騒W5QRlcfn#N۫XUɘ˫syqwy&3x/^7qdlDXZ8𤌧gpZ2e ނMoez U*l#4Cvd<"PY{/xg"U!җ!5. )-m]BpcI92Y0\2f{r6ԖK H&*l#V HUnf**i TS{OyŴ:PM ] %'kr[o5JNbh q95pc5 ^7y)uwx H l9(Vt!F^OJ ZT aUM_Zk :RCc=SE+)2/.ڪCF&ؗt sm̰[N7iWZ@Z?2ā H"ȧ+bvTS-񽃠dba`ZbDۊwn"; ? siQwG|:;]~4:1#Ҙ1冎(iD[n+ Y<;*:a^ Y˶C_bH`X< oۤ8 ^➥ʄ\|-7--`m[x6}AƱ+Z~_ZmW1w~0+*p7Da7-XNحeϒ*h1(6ezY diEC: u<)ru]PRL$CLPN. 
y1LN!u9Fy>L*)# mn 5YvK ڮCy[n Fg\e/-#654 al ͆pؾeTZ6GU3qyE#]мYm^~v:ZrRD3h܏,>rsלkۖ%U0l`gh ,aX𕁯p(7 C(* 3axhnXMc>0 U*A{J:as<,)61A:1H7:h)i{GJe593Є7{=aGcGfUQ<RKF=h@rbgb5*&o |z_4V';]KC3\GL\/HhSOm ORF?d+t4ԿkU!0C\ ,Tnc%| )/np~"%da43@RXqJz /)\+O~dߣK8Rd+a!7XWIR$ىOSdvw4,fH]Y|gr½![:h<2PK E,=1 ,pegasus/PegasusUtils$MapIdentityDouble.classV[sE:{hvc60 A1Յ$-afjJZ ^6(U>LȖR9}9߹_0aT4 a&5bN-iK88.+  yT)>q\P5s,2dglז )cl!=PvŵnCV!pkZΊj2Z~%\i/?Uc 拶FyKoH O}m0׭5m lTUņLDv2#99hJ/x@2Nl2CDCHb$ e=T`ΐwk|*ޱYcmsIV8QX8'+oW=LrUWPn3(JFSܐcZۮ%5ˋ ϮƞwE%A}`sS˔Z3Aۋ5%+LLˮ/n%/ ⊭zw\Q1eܠ Xqt /u|[ G|M7P7k:n#hh;0i } -3)Wn#ԛᕸTZ;WƛaKWe4l%6Hx|5žQ&Z.ʨTP|+mogT(ɧ`|x{mA]Pv~/1 HRqàb"EzQڙDb*XxѪwPaLuQwHD2ptId";'r_-gJ܂ ^JAJ%7zH'1Ů Wq [{'&K ;8Ii8&vS''$1I[%\"T0{:Qp]CMCjֲ-97&e--w[.iUŻܳw_pw+maKKnT$%Cb+w MV]~ [ iuc[KD.w71[\mqM1gIE D,]OZ @nW=(\Ǔ+hN w'tt {P=.% y` L!9FyLېa5~]nw̺,xmס 裙p60ٽq( %oݡj >~e%\i9aܪ[#n_g,=i'/ȁTQ9sGP H).==_?w~̠aE C] XP{.2Ǣ,i8@ Jq9ius,3, *CriHe὚VDESUYj& g8tMկ䄑X/ٶ[%}/)7otjp1ʎ4+ qM1ˎݼY:"7Oד n,: e:^ H+dRE+~@J!a,C뢊4tljЙzw}a4gM`8Z' c+ݮ7ܹ;QŒhJf]>GªZM[G~gƞUjw E1F}`fWˌZ.0xC˲edWϕe?T_7Z j^;Ϊ|E^ n҅r긅: ǧ:>m}Q&s|cwWpBWh4Y*^DpR3QkRUv5 %Qc"nT垖ܬPy&4Ҳ~XcFyxWFM2 ϗUN`dDLc)ϒ_"Fwx}~48NOP1I J}MLhjr !1xt$/ӪwPLu9Q?^TSHlJm!=˧RtNGnA{<߂-%0Bc_o7F0;Gua((E(\q'1N:N!Iiw0I!3qL턧,,Iq>w!N\#? an0CӅdĽA{".DqF߆Q+ PK E,=$T)pegasus/PegasusUtils$RangePartition.classUrG=#^kز0&D`K eF" /KRH*g>*TzvYMt9}%='q_E aJgs,1BE\RBח"%\Uk*&*%|4U}jFix L[+a~ȓ*[uުrǐ 3ͦXgxeN4+qʶhr+-w?0ݏmSV/,Rx26m22eiueoXe,ȾVۃzz8q O6P(o\o]VmCԽB)KG7`= #x?þR?=h뤗IǤ$Dh ASf20yx^.2K:?tq@+p嫫uCdqTAY7|a Gj`.Ɋfjh5C?je /URZmTrG]S!4ᴂN 1 ɽ2#@*NJpwe .}GD :2TBe[mē@+Q[3m[ C1 eJw=Ya*31(<ZԠ Njr"iўwDVލN(ޥuT:sG{->O,#a!S#Fc(a,@h' Kd+{:b).\ 7EK5#w0#v_k#v1vY<qCOD!YGp.DLP2IO1qf^{H$$8O>.$$!Gط`J1 PK E,= w+pegasus/PegasusUtils$RangePartitionS1.classUksU~fM-%XR#EE%Zh0@mJoxmݝ / ~gg7޶?Ǫ 4 ..Vz춅P?r Ws{_GZ@RW7[d8ydpFc= zc0w~D,hU0;Ma*WԔ;Wt$J'ȐSݗB-{!ڬO.oN; ldr2B #O]G=SAI~JJ23'T]g`?5Z:\ :?DbVWğ"ϐLЋ +}}"/#J E h$)ƛA@R2Q?A˪흌C'MPPRVt(Fve58d:< Yb0Eq&0I*·3l|X]jNYK1v9Ƴ}LDֳZGq1h.|?f-+wuUdPK E,=~ 'pegasus/PegasusUtils$RedAvgDouble.classVIsGFf1 K,o K(2v DANRI9qGgfq;sT8'¿ G±1VQ\~~=/gQ/qR6iYT2эK40U@l`,!7N4IJ÷oYzj+%|%٢yKnof'$u<֘d׬ f" %|aZ7Ziu*#˱ڿj^а/ie-dznMDqv[ +f;V5*|jǑG3^)K֥lRaq&0pplyL8YURn- a_ x y殈pNGs=?0n[Nk%![kb4/5X|i43M6{5ǙSYd:cp&ҳqKp|(t|g^k8ǖL |V4PK E,=UT 'pegasus/PegasusUtils$RedHistogram.class[sDkQfnV.jZ(PCkZ&m:-/ZGdw'SL_|8+ '!Ntho9Zydj#( 3|.k2g :/p#C67eܒQ̠8Mߴo,ʦePl2$0Z7-c%m:ouDy]SIHvؼezv.Mkewns'.̍}pīJa-˥?>>ۀW;jkmBx=5ؿ(cʒh.2rsq~(k+{{=F\U\an4E!a\eG>vX*XCaw.㞂&6YNhG0Rv_m/bچ_miK"}&.Žcu_ 'BW% 5QA˝hpw!sQ&iLA͖2053h&aR<50 IBJd<ޢ~ȓ;n$:7!I<"XY{tC*#9R^"#h0İ}ϟ sB~O=S%< /fNF1LqL,U".J<8!s8O,btR/RS!lG;9Z_BEh4J5:a#Yu7 DtHQa=K.YZس0fJ~lL|$HoYdT__RPK E,=Y T &pegasus/PegasusUtils$RedIdentity.classURA=B[A 8oW4>J7Vt835C~k7ZjiruZ#)rv{}t/^dm8܆#8c' 5l0FH━8SEī7gy6%ʅ%RX}EWT浚!߳HXxL*&wyiV8i9f}` f0׉y 2 etS T!lCwQ7Ū%MePhi(qGz0UuҺ(t0β]Wxf_{5gek'u0͕Lv *Q^jpmm ">aO8ܳlr**Vj-VwM(پl^s]ؖCY=nH0 wѤ (Պ[Aqڝ:lR6iVl%w"bh"M&ktt,2tl{Ckiy>air+* Ǥs)wŅ妿7t`JOz)o3LOGdSbo"}mdIl4{BMHN4ƃ$iL6 4EzɒXHQF6Nvhm ONa)673;D4ehPPD! pA '[ 13T8p2G@]pJDtxaNqdGpg9 nY{MY- l!9EM^f`ӔG21Q04Es+@$S4CSA o04XZFeq<:Z'ȸ6,h\Ef] "}}KgX`~(:7 #1{w3UfI+>$l ѱ˹<9+ac Uw!׸c)74h.f ٤27*2hT0:fTM1lTbs GqG2CW0@^@}6ݐ4u׹;'}z@/|4e{*Q$MdvZv`6nK  {ޗw )'Q.K#ʈz_='i,ī0Q&EFmKB S#& ƔT}~?-}%,8kBvgIͣA[@'I2kQ)M'B8kű( 6$kpv+RY?C2]+D@z$>d+d*E4۷o}tGB* 1 `8Cg0 >Qpb,+8/PK E,==iHi2 .pegasus/PegasusUtils$RedSumDoubleTextKey.classVNA=2@D^"(l*0#ݐ tV~_ڍDM k'rGVlq1uoWGUͻ/BẂI1,`D$UpGq`x t2C m-5Ci:g9y\_:_,[3,MB3 5h3pg9-n9mlZ)㑋¶2QtY&,m!E ːKu+`"S, ȊrDj~ۮv7R::Ó[3d܌LaK-y\ߜv|rrGgXWAb;[&j3∽1y!ͯns(vzs'~9 0? 
function PrintIntro() {
    echo ""
    echo " PEGASUS: Peta-Scale Graph Mining System"
    echo " Version 2.0"
    echo " Last modified September 5th 2010"
    echo ""
    echo " Authors: U Kang, Duen Horng Chau, and Christos Faloutsos"
    echo " School of Computer Science, Carnegie Mellon University"
    echo " Distributed under APL 2.0 (http://www.apache.org/licenses/LICENSE-2.0)"
    echo ""
    echo " Type \`help\` for available commands."
    echo " The PEGASUS user manual is available at http://www.cs.cmu.edu/~pegasus"
    echo " Send comments and help requests to ."
    echo ""
    echo ""
}

function PrintHelp() {
    echo ""
    echo " add [file or directory] [graph_name]"
    echo "      upload a local graph file or directory to HDFS"
    echo " del [graph_name]"
    echo "      delete a graph"
    echo " list"
    echo "      list graphs"
    echo " compute ['deg' or 'pagerank' or 'rwr' or 'radius' or 'cc'] [graph_name]"
    echo "      run an algorithm on a graph"
    echo " plot ['deg' or 'pagerank' or 'rwr' or 'radius' or 'cc' or 'corr'] [graph_name]"
    echo "      generate plots"
    echo " exit"
    echo "      exit PEGASUS"
    echo " help"
    echo "      show this screen"
    echo ""
}

function Demo() {
    Add "catepillar_star.edge catstar"
    Run "deg catstar demo"
}

function CreateHDir {
    cur_path=""
    for args in "$@"
    do
        if [ ${#cur_path} -eq 0 ]; then
            cur_path=$args
        else
            cur_path=$cur_path/$args
        fi
        hadoop fs -ls $cur_path > /dev/null 2>&1
        if [ $? -ne 0 ]; then
            echo "Creating $cur_path in HDFS"
            hadoop fs -mkdir $cur_path
        fi
    done
}

function CreateGraphDir {
    hadoop fs -ls pegasus > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Creating pegasus in HDFS"
        hadoop fs -mkdir pegasus
    fi
    hadoop fs -ls pegasus/graphs > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Creating pegasus/graphs in HDFS"
        hadoop fs -mkdir pegasus/graphs
    fi
    hadoop fs -ls pegasus/graphs/$1 > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Creating pegasus/graphs/$1 in HDFS"
        hadoop fs -mkdir pegasus/graphs/$1
    fi
    hadoop fs -ls pegasus/graphs/$1/edge > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Creating pegasus/graphs/$1/edge in HDFS"
        hadoop fs -mkdir pegasus/graphs/$1/edge
    fi
}
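# Note: CreateGraphDir builds the same kind of nested path that CreateHDir
# builds level by level. An illustrative (hypothetical) call -- not part of
# the original script -- that creates the full result directory for a graph
# named "mygraph" would be:
#
#   CreateHDir pegasus graphs mygraph results deg inout
#
# which checks pegasus, pegasus/graphs, ..., pegasus/graphs/mygraph/results/deg/inout
# in turn with `hadoop fs -ls` and runs `hadoop fs -mkdir` only for the levels
# that do not exist yet.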
echo " add [file or directory] [graph_name]" echo "" return fi CreateGraphDir $2 if [ -f $1 ]; then hadoop fs -put $1 pegasus/graphs/$2/edge echo "Graph $2 added." elif [ -d $1 ]; then hadoop fs -put $1/* pegasus/graphs/$2/edge echo "Graph $2 added." else echo "Error: $1 is not a regular file or directory." fi } function Del { set -- $1 if [ $# -ne 1 ]; then echo " Invalid syntax." echo " del [graph_name]" echo "" return fi hadoop fs -rmr pegasus/graphs/$1 > /dev/null 2>&1 if [ $? -ne 0 ]; then echo "Error: can't remove graph $1. Check whether it exists." else echo "Graph $1 deleted." fi } function List { echo -e "=== GRAPH LIST === \n" hadoop fs -ls pegasus/graphs | grep "^d" | awk '{print $8}' | awk -F"/" '{ print $NF}' echo "" } function Run { set -- $1 if [ $# -lt 2 ]; then echo " Invalid syntax." echo " compute ['deg' or 'pagerank' or 'rwr' or 'radius' or 'cc'] [graph_name]" echo "" return fi in_graph=$2 case "$1" in deg) if [ $# -eq 2 ]; then echo -n "Enter parameters: [in or out or inout] [#_of_reducers]: " read deg_type nreducers else deg_type=inout nreducers=1 fi ./run_dd.sh $deg_type $nreducers pegasus/graphs/$in_graph/edge hadoop fs -rmr pegasus/graphs/$in_graph/results/deg/$deg_type/* CreateHDir pegasus graphs $in_graph results deg $deg_type hadoop fs -mv dd_node_deg pegasus/graphs/$in_graph/results/deg/$deg_type/dd_node_deg hadoop fs -mv dd_deg_count pegasus/graphs/$in_graph/results/deg/$deg_type/dd_deg_count if [ $# -eq 3 ]; then MineDeg catstar inout fi continue ;; pagerank) echo -n "Enter parameters: [#_of_nodes] [#_of_reducers] [makesym or nosym]: " read n_nodes n_reducers sym_type ./run_pr.sh $n_nodes $n_reducers pegasus/graphs/$in_graph/edge $sym_type hadoop fs -rmr pegasus/graphs/$in_graph/results/pagerank/* CreateHDir pegasus graphs $in_graph results pagerank hadoop fs -mv pr_vector pegasus/graphs/$in_graph/results/pagerank/pr_vector hadoop fs -mv pr_minmax pegasus/graphs/$in_graph/results/pagerank/pr_minmax hadoop fs -mv pr_distr pegasus/graphs/$in_graph/results/pagerank/pr_distr continue ;; rwr) echo -n "Enter parameters: [query_path] [#_of_nodes] [#_of_reducers] [makesym or nosym]: " read query_path n_nodes n_reducers sym_type ./run_rwr.sh pegasus/graphs/$in_graph/edge $query_path $n_nodes $n_reducers $sym_type new 0.85 hadoop fs -rmr pegasus/graphs/$in_graph/results/rwr/* CreateHDir pegasus graphs $in_graph results rwr hadoop fs -mv rwr_vector pegasus/graphs/$in_graph/results/rwr/rwr_vector hadoop fs -mv rwr_minmax pegasus/graphs/$in_graph/results/rwr/rwr_minmax hadoop fs -mv rwr_distr pegasus/graphs/$in_graph/results/rwr/rwr_distr continue ;; radius) echo -n "Enter parameters: [#_of_nodes] [#_of_reducers] [makesym or nosym]: " read n_nodes n_reducers sym_type ./run_hadi.sh $n_nodes $n_reducers pegasus/graphs/$in_graph/edge $sym_type enc hadoop fs -rmr pegasus/graphs/$in_graph/results/radius/* CreateHDir pegasus graphs $in_graph results radius hadoop fs -mv hadi_radius pegasus/graphs/$in_graph/results/radius/hadi_radius hadoop fs -mv hadi_radius_summary pegasus/graphs/$in_graph/results/radius/radius_distr continue ;; cc) echo -n "Enter parameters: [#_of_nodes] [#_of_reducers]: " read n_nodes n_reducers ./run_ccmpt.sh $n_nodes $n_reducers pegasus/graphs/$in_graph/edge hadoop fs -rmr pegasus/graphs/$in_graph/results/cc/* CreateHDir pegasus graphs $in_graph results cc hadoop fs -mv concmpt_curbm pegasus/graphs/$in_graph/results/cc/concmpt_curbm hadoop fs -mv concmpt_summaryout pegasus/graphs/$in_graph/results/cc/concmpt_summaryout hadoop fs -cat 
function MineDeg {
    in_graph=$1
    deg_type=$2
    temp_filename=pegasus_deg_$1_$2
    hadoop fs -cat pegasus/graphs/$in_graph/results/deg/$deg_type/dd_deg_count/* > $temp_filename
    cp pegasus_deg_template.plt pegasus_deg.plt
    echo "set output \"$1_deg_$2.eps\"" >> pegasus_deg.plt
    echo "set xlabel \"$2 degree\"" >> pegasus_deg.plt
    echo "plot \"$temp_filename\" using 1:2 title \"$1\" lt 1 pt 1 ps 2 lc 3 lw 4" >> pegasus_deg.plt
    gnuplot pegasus_deg.plt > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Error: can't mine $2 degree of the graph $1. Check whether the $2 degree is computed, or gnuplot is installed correctly."
    else
        echo "$2-degree distribution plotted in \"$1_deg_$2.eps\"."
    fi
    rm -f $temp_filename
    rm -f pegasus_deg.plt
}

# (Kept for reference; not called anywhere in this script. The plot command
# dispatches through the generic MineAlg below instead.)
function MinePagerank {
    in_graph=$1
    alg="pagerank"
    temp_filename=pegasus_distr_$1
    output_filename=$1_pagerank.eps
    hadoop fs -cat pegasus/graphs/$in_graph/results/pagerank/pr_distr/* > $temp_filename
    cp pegasus_distr_template.plt pegasus_distr.plt
    echo "set output \"$output_filename\"" >> pegasus_distr.plt
    echo "set xlabel \"$alg\"" >> pegasus_distr.plt
    echo "plot \"$temp_filename\" using 1:2 title \"$1\" lt 1 pt 1 ps 2 lc 3 lw 4" >> pegasus_distr.plt
    gnuplot pegasus_distr.plt > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Error: can't mine $alg of the graph $1. Check whether the $alg of $in_graph is computed, or gnuplot is installed correctly."
    else
        echo "$alg distribution plotted in \"$output_filename\"."
    fi
    rm -f $temp_filename
    rm -f pegasus_distr.plt
}

function MineAlg {
    in_graph=$1
    alg=$2
    distr_path=$3
    plot_template=$4
    temp_filename=pegasus_distr_$1
    output_filename=$1_$alg.eps
    hadoop fs -cat pegasus/graphs/$in_graph/results/$distr_path/* > $temp_filename
    cp $plot_template pegasus_distr.plt
    echo "set output \"$output_filename\"" >> pegasus_distr.plt
    echo "set xlabel \"$alg\"" >> pegasus_distr.plt
    echo "plot \"$temp_filename\" using 1:2 title \"$1\" lt 1 pt 1 ps 2 lc 3 lw 4" >> pegasus_distr.plt
    gnuplot pegasus_distr.plt > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Error: can't mine $alg of the graph $1. Check whether the $alg of $in_graph is computed, or gnuplot is installed correctly."
    else
        echo "$alg distribution plotted in \"$output_filename\"."
    fi
    rm -f $temp_filename
    rm -f pegasus_distr.plt
}
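# MineDeg, MinePagerank, and MineAlg all work the same way: copy a gnuplot
# template, append "set output", "set xlabel", and "plot" commands, and run
# gnuplot. The *.plt templates are separate files in the distribution and are
# not shown in this archive; a minimal template consistent with the appended
# commands might look like the following (assumed, for illustration only --
# the real templates may differ):
#
#   set terminal postscript eps enhanced color
#   set logscale xy
#   set ylabel "count"
#
# The appended lines then choose the .eps output file, the x-axis label, and
# the two-column data file to plot.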
function MineCorr {
    ingraph=$1
    n_reducers=$2
    echo ingraph=$ingraph, reducers=$n_reducers
    # check whether the results for deg, pagerank, and radius exist.
    hadoop fs -ls pegasus/graphs/$ingraph/results/deg/inout/dd_node_deg > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Error: compute the inout degree first."
        return
    fi
    hadoop fs -ls pegasus/graphs/$ingraph/results/pagerank/pr_vector > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Error: compute the pagerank first."
        return
    fi
    hadoop fs -ls pegasus/graphs/$ingraph/results/radius/hadi_radius > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Error: compute the radius first."
        return
    fi
    # join table
    ./run_jointable_pegasus.sh $n_reducers SemiJoin pegasus/graphs/$ingraph/results/corr pegasus/graphs/$ingraph/results/deg/inout/dd_node_deg pegasus/graphs/$ingraph/results/pagerank/pr_vector pegasus/graphs/$ingraph/results/radius/hadi_radius
    # generate plots
    data_filename=pegasus_corr_$1
    hadoop fs -cat pegasus/graphs/$ingraph/results/corr/* > $data_filename
    # PageRank-degree
    output_filename="$ingraph"_pagerank_deg.eps
    cp -f pegasus_corr_pagerank_deg_template.plt pegasus_corr.plt
    echo "set output \"$output_filename\"" >> pegasus_corr.plt
    echo "plot \"$data_filename\" using 2:3 title \"$1\" lt 1 pt 1 ps 2 lc 3 lw 4" >> pegasus_corr.plt
    gnuplot pegasus_corr.plt > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Error: can't plot degree-pagerank of the graph $1. Check whether the degree and pagerank of $ingraph is computed, or gnuplot is installed correctly."
        return
    else
        echo "degree-pagerank is plotted in \"$output_filename\"."
    fi
    # degree-radius
    output_filename="$ingraph"_deg_radius.eps
    cp -f pegasus_corr_deg_radius_template.plt pegasus_corr.plt
    echo "set output \"$output_filename\"" >> pegasus_corr.plt
    echo "plot \"$data_filename\" using 4:2 title \"$1\" lt 1 pt 1 ps 2 lc 3 lw 4" >> pegasus_corr.plt
    gnuplot pegasus_corr.plt > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Error: can't plot radius-degree of the graph $1. Check whether the radius and degree of $ingraph is computed, or gnuplot is installed correctly."
        return
    else
        echo "radius-degree is plotted in \"$output_filename\"."
    fi
    # PageRank-radius
    output_filename="$ingraph"_pagerank_radius.eps
    cp -f pegasus_corr_pagerank_radius_template.plt pegasus_corr.plt
    echo "set output \"$output_filename\"" >> pegasus_corr.plt
    echo "plot \"$data_filename\" using 4:3 title \"$1\" lt 1 pt 1 ps 2 lc 3 lw 4" >> pegasus_corr.plt
    gnuplot pegasus_corr.plt > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo "Error: can't plot radius-PageRank of the graph $1. Check whether the radius and PageRank of $ingraph is computed, or gnuplot is installed correctly."
        return
    else
        echo "radius-PageRank is plotted in \"$output_filename\"."
    fi
    rm -f $data_filename
    rm -f pegasus_corr.plt
}
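# MineCorr joins the per-node degree, PageRank, and radius tables into one
# table (apparently node id, degree, pagerank, radius, judging by the column
# pairs plotted above) and renders three correlation scatter plots:
#
#   using 2:3  ->  degree vs. PageRank   ("$ingraph"_pagerank_deg.eps)
#   using 4:2  ->  radius vs. degree     ("$ingraph"_deg_radius.eps)
#   using 4:3  ->  radius vs. PageRank   ("$ingraph"_pagerank_radius.eps)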
function Mine {
    set -- $1
    if [ $# -ne 2 ]; then
        echo " Invalid syntax."
        echo " plot ['deg' or 'pagerank' or 'rwr' or 'radius' or 'cc' or 'corr'] [graph_name]"
        echo ""
        return
    fi
    in_graph=$2
    case "$1" in
        deg)
            echo -n "Enter parameters: [in or out or inout]: "
            read deg_type
            MineDeg $in_graph $deg_type
            continue
            ;;
        pagerank)
            MineAlg $in_graph pagerank pagerank/pr_distr pegasus_distr_template.plt
            continue
            ;;
        rwr)
            MineAlg $in_graph rwr rwr/rwr_distr pegasus_distr_template.plt
            continue
            ;;
        radius)
            MineAlg $in_graph radius radius/radius_distr pegasus_radius_template.plt
            continue
            ;;
        cc)
            MineAlg $in_graph cc cc/concmpt_distr pegasus_distr_template.plt
            continue
            ;;
        corr)
            echo -n "Enter parameters: [#_of_reducers]: "
            read n_reducers
            MineCorr $in_graph $n_reducers
            continue
            ;;
        exit)
            break
            ;;
        *)
            echo "Invalid algorithm. Use 'deg' or 'pagerank' or 'rwr' or 'radius' or 'cc' or 'corr'."
            return
            ;;
    esac
}

##################################################
##### Main Function
##################################################

PrintIntro

while [ 1 -eq 1 ]; do
    echo -n "PEGASUS> "
    read CMD REST
    case "$CMD" in
        "")
            continue
            ;;
        help)
            PrintHelp
            continue
            ;;
        demo)
            Demo
            continue
            ;;
        add)
            Add "$REST"
            continue
            ;;
        del)
            Del "$REST"
            continue
            ;;
        list)
            List
            continue
            ;;
        compute)
            Run "$REST"
            continue
            ;;
        plot)
            Mine "$REST"
            continue
            ;;
        exit)
            break
            ;;
        *)
            echo "Invalid command. Type \`help\` for available commands."
            ;;
    esac
done
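# Example interactive session (illustrative; prompts and messages are the ones
# printed by the functions above, Hadoop job output elided):
#
#   PEGASUS> add catepillar_star.edge catstar
#   Graph catstar added.
#   PEGASUS> compute deg catstar
#   Enter parameters: [in or out or inout] [#_of_reducers]: inout 1
#   ...
#   PEGASUS> plot deg catstar
#   Enter parameters: [in or out or inout]: inout
#   inout-degree distribution plotted in "catstar_deg_inout.eps".
#   PEGASUS> exit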
ԋz;v]49'_2;o ߴs04lijVc1ʿt$pN }:׹K-=ܣmg'yPޣҾ1iگ_ 69 Q7P-1U;|>{At3 |'jGgJ{&"J`yÝ:ZAZ,pN+^{}{8'i)i,5I褌 [Aľ bvxͲO&8!}!-ž6 H/P'DFrO)+b[oȯM58sHyEkzƹh!nCa\Za961b9#hQAu8'1aC#w &}zo~#px`; N;ԬR P{,HQg_BmM[CE%}MbmM)M1&V]T˲H9^Z'ޖ@w^}r=U_C_[iց-`O.d&qm(oQVܪFcst% Ci_ \6y[.r9^ӱ:um1ZEd^R-WhR@YFj()[Hq/U RzDy>iKm)gOwC]C˘k_;u๮h!a(NYwRvXei!͆sz"9=A@҇kt<p)?o}Nh^+ɧ= ANp?z!VH#n[vh4mxxV3D&n{V3PHYet6!g;i6C [q/?3dj'׀hRZLj2`;{`_@BpϋϷ=*Y2js-'k?;/}B>wH&ޑxMUDWq| ΡBk Gy"t_?ItC߂DjߕSkW`+Ci6^AznAA $?Ȉ:_Fos q(p4olLoobc<8~wcNQHR@ $^VYJH :AH/Ӫu$1AhڦUo5sBd֭YjeLn4P4 2y{޻{6&Sy4ey,njo"ŸjW;1Gg#)敉3*l~,bgM??'/FKofcZl}茝!O#DLe곬Ȱ3=?)3/k9̔>P'-O;<-/ɖPgySL{nax^xy˾#'9 CO⼨%1*uu;'">wAom!$Ej8#|#ϴ[k&#| ;c!\C[,Ψ)-([Z-RLYAH,`?E*Ə.SPOϋbѴ~n]ZצVݖ֤4Eimi\FRy0D!'DL2fjTɞ(G$(NԨD6_ #®[صl?,F>/XWY!s`\. Dt~0&]>އ.{:ߐ0UC^K?o/T W9f;h`0ºp{\l.j w&=Je X3BOB goŝ 䡝+ 9췉r G~ :@0V߮i2$WaqrU蟓2Qw*g5 g`A=L \xŅ{?Ņ`qya/,.OOxOWKmPgY tQk[]mQ%Om;ҥG˺k,8@Cv+WCe[DSKMcjXMhM|Z>V-Imc["2inO~,vG {V[.W<$g57-,& >h `^!׵EL&v򟤯兒tT䕒4xٜq|)UĢ 2mmaK<<Ģ1ꥱlb$m{td:tgt..ԇ?p^Bp1ⶹm84zG!&.(1˲$1br-fXun|JF|[ }z֋(~ol6{]gi5z/+-KKTK Zxݫi97OC4=RT}yAȮ&;;]|w;cǵ q;q%1_i@KPPEq:A d-FIC#J0)ݪ*[j(MuZM)bEv}v`[g}|8MŤ~Y\}MV}w1&FEfKK.tɺ0.:nl#,fT.[ '{ "ruQuQ#Aae./iQ<.#ASUYzD9! 1~ 9)D|/󘏁^pބ)̘o9WDfWE.x""RDA \t.ft%+_3tĵL@3_o^ G{9ǰݥ=Vƽ|I:Kån|Mp/0;n9C(?4^l'o+dG&Rc~eK=\O q/МJ9). ͚jb׭)jMŬIoK+Yp$yy1f: CP5~]WYTǩXhV= ͏Q L,dP`afI$"MCivìugZlGasJ mHoB*Δ?١gņȠ{X]+vUy(6;RD'/Q4tD2~_*nD\(j4Z-YӌR;pV(Y6@)P8J4)[Ba٩{rp:u%L4¸48W=5;~kRzvP\f[虣rϯ}tjcgwJGwcӛչ>G_J0㮷4_HCMI[#\g!CC,Y>|hf7p%UPC984 ZJF6j5@_%IyC#b̆̏O#Fb| ÜO,+.)Hw@;G#LxxkcíppiU|7>X)Ք|g D"fAa# y0O%t^^̌lŧ]H]sα`#+v*$7p$R׶㏥U+_Y{JL VF[&lsL\NMUxdbt9,mVj*,p!ɧbT7 6 dJ3r \̶@ڃp+3c(7LOIp22F࣌奆}{Z}Yg{z玦K'<\NL uuuMttM!}wBR&a,$ ./D6杶9 HrR*H2cqJ-K.Mi$WCg.y#߃Zo "p۴5ꔙ v}b¯-'Kq~bZ? bgPpjlK t6-GCŨɩ_ ˠ' fE5C S>5sa.0ى0F+O`n#rx9 7q2W*5* Չr-p#5n/UZݻŸ-BV^c&Vl:ܦ Mm JP=b COyMs>()/f QAJ%{?:9[H|_BNv^L  Y3oqo(kzm O/k٬<7zv]N w?.A>>ž聹fh12Kmi`2D/0vD_`&,Jv"9&j!Erƛ \Ww|=7}G `=K 0VQ*~}e|ts87eʕ{.>v}0i b+ׅ=gV9{\ Xl)\1)s=c5Xº-*,& wfQ6' 'a@Ͽu}=[ py.eRF$Ŭa$XyUK)^ztW PT?쾷cYVW"953jpQieA f ib4*!MMcccA!USSMqtFԖN)4td=saf^Sb`R<Y7uR8{4,^6PRUinKt^V3iʃ.3h<9I˓^EkmT!-qIծM&.Oq?$]tv{YV2]}b5 n^ |pZCaA0kg:Boo;gեWԽ~[ڗ{g'R>P)" 3]30XE[rG_z^ؒ'GdIKn(X-(BTRh"CS ˵)Y,!|7VU6/߮bnmRń RL$E t:[cN,-6T fhX6[CkSep7Dۦj=tVGEM`Gj5`qQ+5zs(axpU@`+Łݦ0DmN"9ѥY궞9eBlia3{/^nlqCvpa/nYdt]]Q]u@p?!h5g˥5!dY(," *8p 6"ٯ " &szY/F"{XM03f8RJH4&~%P_LfDfضe )f0 ߁;;C[Iԏ 0eP: 1w䋉[+'2|5stN 9Sc9gLd:cҶ1ɌƤ0\ƸVd{tLCMׯ\Ӫ¹9H}` {]إ ''hRt]424d h?0ru2[2R)}aOj!!R:&;ïz\n냡 (bz1umi2nH>jy)$8I4J4EcZ$Zbfu,Y]*QGF+=Y^o >ycxfZ8hk;[nv5D jnSZv;QV`yO]"@I&ӲeiM2 K:%lb)zPA\v)$+JDrj]:g3SuB^Vïٛ77G<) ky e.L2"F6.x02<3@ ,S%]$` vq6Ł4fJUuA1`{؄ L1x\`IĐ>vfn`TlPQJPoWg)-[96)~]_-)whQ7êW|+OWy8w "MWLEcv1 YXĨ& HxXmoH )쫘P ut+tWahxieNؽcXUCc%%+ЂB?vio=W_|Nѫ N'PPA!E$jS"zj$0V^PfVKzņS9>EI.$q\DQRO c+LKѴ(k(Hy"T"D"wdNix2>~77\| ha2 TRyAE"l zzi)Vپ$5{\9‹aeEyDtH:.ѴTܦqSVC5/P;&4Fg'FapB{æd ׹F3v.|cx^t{#nG?\]5wΞ[Oyo4oJξK.%k^ZEmenk<}25bMꠊ%vfA 0NI'<RtBdΘZ0#,kυVG6譡P_^`1 yV6 nI6F^&Ɛt`qB;1 W}PTkw]eYeW`WX2DAEtm5ѤшQCd:D3DF,!U!a6Ccc6A3į6g}#3}ۻw{ι;Lȶ,cOw0}%j#Mk=E8R>R)!Yヌm6Bu8t,U9WPW ϯOI?;vxf#U!c̍3EpJtq&07v`쨦C%dS Ӯhۙw5 wS /`N4TS)S9NH9K9#Klʀre yZ`. ,2ġ;v-{!4p51s<ƈy3B<%D߼A,vr(K{&vy6GTTne_9,s220p˺435  @Z;; UY/M+^^Ե4*Y:OMVwg.oQ-c%h 5Itvi$1)ԉP.BA}kXdM>HSG~Nz Xsyoߺa[vOY^-[Gw!FYbUF^S ;32iqeڨ|ȿ/\SL_KItD)4ߴ83sj2xIEƒ !XF2nG>Ql樏c-%@Z5 ILH6A3vهtn1wFs'0VM%6XfxV$Z;$Rpt`_W~ gp(=K.S'3:SG?!g1͛y;bAguKwy#q2? Ȃ {V8*1w}[Oj lg*r7(ޫS6DTIqɥUk [ 3|ai3$ {!MOOKC/-!d&waVn;+"/H}X$5 '7d#{Nfpdקs"bd&G\SQ$4iQ`GlKT ̲ Jb<Ö ucH(ls~,$! $ &w@Ke[jOk;M93fؤǫ *&Υ O.;x&ksVM:}psǫ~? 
;Kȓ"]KGlCKzjO#÷҃NaG4YCӅѶD_Tsil4~9}}}Ͳ'-1(XAf^<6:W]Us 1^&x9@#wR\'aIfH%t QB>`DVC@C:L޻<}7Igv|Ѫ+/̯;e<{;??jOpO fpEMZ&p@: F(KѡS;ɣ`ݾ;}z#ȿ!995& 'qB"y`ru8@nKJcGP(M8shPلZB5۪Ź;fN)Ofi]Ϭ'=4p帶)W߼r; ^fhBT{%IX  I"CGK g&F3*OZHЊ=ckϥ 5P?wioi6E ts\xG|&Bܦ`j GP/g}\yza292\6\6O1;!_W}lݹݽ>l 9%GY NCK[ NFJ5JMʇ㘶8VP44)JUT#Z7w;fޝ~U?8$͞ҡ(ɠ0ESq!3M$Eቨ0.N@T$7fm kٍoAy^+rl,Xs HdM&^#Y+Se9[iT#a[Uͤq Ri4[B[|xj?Q3çJϗN+xOP+C_ط;1gWqmoGΝiӖ_zf它o>ڎp/ ${DER֧ֈF  V(yWDY .OT>,*k`^w~~3it[ XOs<-.FAC$\04Fn}tkR2P\{<z !v=бE [y~7ϙoMys,!fj \FlğHBE?\DcyJODG{Aι]=[ EbŅc\jD}|ӷo:0N8 jVXV>"YB99G7eF5gdVC>gϵbUEqxg&"L,TP +TO%2&$C;LuMt^2"&۠+4y:~be|9Ƌnt~=㜑 >7e0fXd{ d{d;W:Ȓ/Ā?5DYfYբrK{oMY:ۼx^W]28?T fuê.HX̪|?lus7=!N %vf)|b(l &Ʀ?Ĩ01YR췵PK@ /!l$c͸i9NVjcՋ~X/,gj~iK/+ f%vD̿* ;TM7MSw7DB#xQjaf;:GI ƺ [,l կ۴R(% Kcܚt'B l<مĭE@? k|}oܻO7! 2} ׏ ys^:o ٞ7ۅyl>ܝ_k0 U WGq 4}x6^To0=^Z}ܼ ?n,߹*_"0,GBzEg<$N2-F + E**LlV-%i$!NBk;n}ޫSƀݝ5dbssBL~BɄ A8 J^~e6 H)pYGyؕpFn0>|=93k#πBDFޞ*=%֍AG&JSNg"#V$&l='WCL)%]WWFZV]c=^[Xqt?)I t*kCU?|pƤ KZPO }3#^@`EYh:nOBfB[ha3 l}ڈwwNW?k~ΒSx3XW}\;vcp N`'B@E*Bv(:6!˭S0(zPiof,c=hTW\7GSbNnkm6d=+Jϥ]?>4xX|UxUϜ__(Pr!ġux6q{gߏg'1vbCI\4 xeP 2ڀmZ݄u* ?"XVNmD +Qq}wv`&ֽw3º >wLR!K#@MK>8vm@6|g*ݨϕF8+>Pi{jqםY+fWxQܦ^UV/7.)!MS4Erkj@&(h$P3*7(A!fii=*lUO ɾ$0[Rqk .askz Ts2"MS)$8JNQkt hA@h$>p 2O8PbFa3\VNPNniڪN{.}Ot,zrldhu9@Gw{X\wyۛVs76?nfӶ\:c7'L6Jv%nTz*x{rƄdGz[^,X BOJ] ҥMe/HQMN$N5)K߭^z\`mPͿ=]١nNv{=~ugG+XQ%l܎v-26sS&L4Wn[y9.OX:R$;\eFbub,f.e4=8QwSJ~ >c1R*8ǓXy]ԡewpQ,8B+t`=O:eutI4lfjQ-`/NgBN^KEZV._H{!2y2î<ƅ|f#􉠌5JvSaMWmi;MMYʰPnUM,ZRx~gώ}\P}h]Yҷ}+}}߿Vv߯Z+l^6tVjp@E3qk[:g YBE~"6|]-J  O@ν񨃁u2N -15a$4\VI{w 1WVpKΤe`jZnER gx1d> 1E ^&]&^an0L q I%9(p(P*U4WJp( *a&Sw7S.e=G.<8uk(p`sG Fp]b(IՏԫi,yr'YODMIrBŃE]2_ F :_KHH%Ic"R78`n&%>rة=K1=I F!H^H6C7R HV[+@w<gqQ :-N`@5/j@jkMa.6ww dGwe=?E&Tա}UY߆[{t# Bc2VVirz5gZK}.7ɪ(Qn'c!Y(/3VUZc$d(ĠUհǶ67ka=X!͗@PQPfزXq:˺Fgg! !XDb2 KE1fꚬ(qU `DT+%Z `'K5pT}LD8#h0?#"A' H(hdԚ;&(I֥Qstz Jѣ$[dk&tɂ* ''>a ™!NW/ ʼ(a*˂ɴӼ_^zhx^uE?ֿ[ C9Ub4ZuwxǨe(v;'YE1an8A ,D%WnsD>ȭF/xp, >E%KK$yY*Lzc͊x#G$ŀAl9,>H}Y'q}j>YG Ԣw)A."1`\ 8m#yMoNѿVKILhzhw0dT :#Ȣ];ܖl\(88"TvpB{nJtx{y,trA?=[.Sv8 >t%#)DmeDQċ(I#X V hLKZ؋_RX+Jt] s$Ugx}cS>[[% T;w{~~jNY995|cCZb8$f؀)5H>#Z6lGnu-;'7 Uhzd5k:7VO5lbBX㤍;&N:1J4J< VFTvKҰdk:;'Qd{$P+HvYdpi0{Vʩis,V3vJܬ Qp{Dy=rNiso1yJ޸<[% JU1hP,13'VTU$QgG=vִWYD '-v?u.\ ^oJu*8bLFIH $S`R_z`OE8:>9b(*@s( _L2%4UG-?Rz=>-'  !-Ut3V.'vlژq̿ J}Жt^TJelk[f{$1u?iaҟ|Z̄ w>J>:#ܜI߃n6 A6[%mei6[ydM3(5&k=}bZ*07+Oߕ-)/+UU.Wv[q6q٪e6Kn-f|?T\W,n@rx ^ri9N޴-'Q(t<rb2a@p&>U24R.e8KxT9#h 0i!xde`z* sT!4U`Nbѡ>q^)95fEkU2O=~H6JzU$[dbX&U| $3H2'[[ҙs+Xӛ돾nQovڡ;CW&ooz.]a7*@U1iLd'f Pm8;Rpݽ ^~/.bSҦhiZ"-gsCʡ+E-l3b[Z#nO=x\Z$s2cm,bpQ Xs &B5BSv:/茘AY 3b4B 8HU(Nu:A@eֺƜ:*{Vw ZU}FؒB!5|hPz/oz__7;M8Ö=$8vK;+_çW?܊=0 #CsŘ8UyʌwPcu/ݯ~YԴ_· yr@aO7sf]8wر7 أY"tݓx"bSHE0QL۪6kW+!5  H=YJwBW &DHPl”0G!^$+6Q_r\B$w s]w\x4a%*)XG crW~ZZ5Z*KCk θrRۥ98ryAiEZshEZshM<85WgDһė*'^Fiy9: "&:A6Z)VmQhI6>--:n\`+C`_"Z-k(P#<Q&ƑK̩OEƾ_=-=l|m³eE!{{﮸yzϕgN} WCPlqɁOŠ]ʣ4E986cDm!kjG9ExE01qp ١ $!s!"g[EtNsFȶx[a?Rd% !NER-"BozP2JyEqQ&a))Dl `茰GX(E:R`fIj T$c%c; [6bVayo7$箎~ȳ>ͷPWNkꪪQ*S5!)+vTS;4V.2FIfsbGbG|IHlѓ ͒AzxT#4Z-˟bxE4Ks9Gq #.a&KqdZ$<=PC3:G[ svdi^6 ®7)x=лgqRTdck!Xܖ7xeeM e  |AJ,q\b@# V 9l/g!ċyJtہO&[CxS㤣A"5d%_YAN]]ݓod2|"oz UX-ӂvʣA[cDruzQs2*jڭܐJHIP6$26S7bql:czQQwI~Uzm0-N捂zb eU2ǥ0L,+vA1õ>B?WCWdy%L(3>{}=1 cQGb>8w "7x҃ 逖CvR$N+EM֡\_O=bG2wжlFZNo8ttZɪ%7[߼yޏ~0{ tAuYKbPJ0*( 5xQVvv⠹sF8LPgyLT[&-sLm'jD[A@MT[PcH& : MN<)rMAlby_p8EHp5Ա櫘 oo~ 5xL9, YU1I/y6!!0Bxǻ/?2 W>gj9S_]OOBJ!a?r+۱$Y҆GIDAaK+ )UX hN2GX$TBajZXBK9).=MP$=({]leokg榖RYzKҕ@? 
<K2Cjim rrmKJ% PƇ#.|w_:svt$nY?Ά!lgpSB#\EЅBxyGV&*3 1И Gee^C֪%RIk VYmqs{w54 2F 5¥TAWR;۞ܸyt\wb|Evqx+OT?0;OS8wtbIzdld9N3w_^o uYn[x $, H<<.7Vk[ā9v'Q4{9*V~VYGVC&tatX )ޡB*3=:я+?saIB>f[_6z0Y SpNQS`P;vIrzqJ:;箾ppѱ~#DVK1pţ=O} ( E]N #\5pN":QmnhC&[H IL6MΜ* 8߬sXQ=,L~)UA^ m KQ9 4Sqhfkf鲙gO[ qwVʹt _.dF1]lҲ'\-k+[q{?lj|)B2,y&Ij6M6Mlڬmr7wcxthLDg3/%K(y}ĩ؅;phH5PDE4_iȺBUBX8+ଥAGlX/^~E8B9}@ =:Rl xa.WpӠL0| v3qiSpM<hq E sRkzMI0v7wiU#+uV,8*Sғ|4IT'q+0pd~Niq珰(c*'-eH)(.rF˔ڡ"KQ#`{=1 ;#Bx k)鐏QU詣Y$=p'^. & %mv=S6>11{JbNť%l3HL~lH!XdQ8b 'dIq~TgrHdj۶mhfwx,^J*S+sAgy1F"9eN:vm޲x7g,Zֳ-m:ɿ[/n%XݱfF[\6o[]Daj_ylz2h-Moit{44$=E@v-tl rHTMVFalsxk]^C9~ƒXW9fY|.s:@ief9W!/zb#! 0PrTX*"N('/_S}ӧݩD蝷+1fG/R. XVxr'rVeV̝M\-j'|,h,h.hb't@c='';' cT4&|dobp/91/CٓE *^,-=vOCU{fzN98ns؈,YA&8bH2/][TSpʝt@Lڔr}zkufѢio}b7|)$UnPt(Do(:s9?:$ lY̘̓R7}tk?0cvVI6gHcfZLwؙ^EV Qit#B4 zpPBIUBjx1Y"<-«H*nDI(nq7yɴ),!&CCt ]n?tO*E@1R?uF3ww/ b`8IiX D!!T H<hRˏQhRRQ$nTWPQ~wݙJ;JI5^KG"GLl#E1XΊģDQ,x31oe:ꢚ E!`L|E(X]'5$Eq F"BhEFD8g~Tyܜ+ !FOHeA0ŁIq) .zQԊ;MWcYg/snٱv7С":23 g Z1Zv,`QeO}IY w zdyx#虮7*/ArI\Gĝͱ%J,k+~f,׳΋N/^{Qsnq8zOZܠ-𴷐S2YF_D8v֎5':^:eG9>ƒ ³O5y[iV"1mvuh>'!T*^JPb>I4 RyTjpoc=rDZ3?Ycsx8uQ ?0СӸ/uj`TebD,vĠba\*TN;;aGr^=} 5Rh݅x*~ԋrI+9C9B*e ?)I.BcI*-"b#J ^@Аͪ*ia`U5Y@m̘`*mJKUFu/Kq2$b_$߲$jDT, > {lj((HVZ!"'+#> ?.T˓bZv''>.B+u_q/" 1Jpֻt-)mV)S+868Q&QRy' @( 8MBz0 dO_yƒ?HW.o_qjM}WeXWki_f:FZ$SʤWM5q5#+[J4lN6\qe#}e0x!gq*Ev-wYolE"{s3 xKRV5a6fJGsYS=kGf՘03i=2~~:\Jp)?sCY@yI[!! Y=<}Nx/f8b*Vڭ>KZm,b/NĆ @P.9t'RXyDsҝ[() D$&?zMjȬㇹƯu+Ё,j\CXG0 xOMx.y`a@T7G1/\fng;[mE.Lɶ{/-.sp+7:K8Ȁ2V]VBc.)E7!]$O+>w^KvՐbUFI`U;%c;)6w ʔ۽ %mOh~j0V؎nstgXBˆeC@L~Iޘ~\SMa\vƐp47> {lSUǿ;nw쎮mLn@b9^lvcc ԁ/DEϐH%1J|DŘ!N_Q6-3?i?zzr{=Fg/$i-?51$l:Âl<|TΤ#Z+.Çbg.[SR˸VI2*ǎלRh#N7G϶ l[ƽ{Ap lCtgh)i'I:LV0<w:m񀿴\`)[npZg!thU!iQ})ӏPy&la>E{).ZBJ/9=5Uawf9+!ӱ@M1BIJH4/]lH{M۔șC{Fkk 4Ƌ߹Y6ϺzehvNJw2$;S<1M{)|FTxQf2{k̚pGؿS> ș$OKI4.NiPd3=brVv';6"aDcI!AP9bFi;jKxHtBynmZ6fsj 8hE3(P&P`f`K oڑC_J[=$?.ay[22Mg%M|lp! 1#qԤ,i$uTHiԖ.%-]gr >A$.i.J Бd/_&^}H '?LeHPV">LeN1 d3YH, N+ * ;K@I2p+`q|@L*>|q8ei ԮUM`^h<4m"Q ˀN|~[zwHM`00Dq3B:?;љ p0h%f,ˆYQ(ִtݑ=wr\8\nO~xa5W2QU]S~媆kW7E\ޱh7u—OR"yP΃AA;֢ Q8G45C͜*"15^=? cїMjLm$+C˼D-"ns$q[U [${}6=rhwlD-PCԡz1H1ڈMNV/SNS; ' JFn[(G&'؃0@XzWq+p.BU lMDrqxرrEW/?7I@sl_{ endstream endobj 90 0 obj << /N 3 /Alternate /DeviceRGB /Length 2575 /Filter /FlateDecode >> stream HyTSwoɞc [5laQIBHADED2mtFOE.c}08׎8GNg9w߽'0 ֠Jb  2y.-;!KZ ^i"L0- @8(r;q7Ly&Qq4j|9 V)gB0iW8#8wթ8_٥ʨQQj@&A)/g>'Kt;\ ӥ$պFZUn(4T%)뫔0C&Zi8bxEB;Pӓ̹A om?W= x-[0}y)7ta>jT7@tܛ`q2ʀ&6ZLĄ?_yxg)˔zçLU*uSkSeO4?׸c. R ߁-25 S>ӣVd`rn~Y&+`;A4 A9=-tl`;~p Gp| [`L`< "A YA+Cb(R,*T2B- ꇆnQt}MA0alSx k&^>0|>_',G!"F$H:R!zFQd?r 9\A&G rQ hE]a4zBgE#H *B=0HIpp0MxJ$D1D, VĭKĻYdE"EI2EBGt4MzNr!YK ?%_&#(0J:EAiQ(()ӔWT6U@P+!~mD eԴ!hӦh/']B/ҏӿ?a0nhF!X8܌kc&5S6lIa2cKMA!E#ƒdV(kel }}Cq9 N')].uJr  wG xR^[oƜchg`>b$*~ :Eb~,m,-ݖ,Y¬*6X[ݱF=3뭷Y~dó ti zf6~`{v.Ng#{}}jc1X6fm;'_9 r:8q:˜O:ϸ8uJqnv=MmR 4 n3ܣkGݯz=[==<=GTB(/S,]6*-W:#7*e^YDY}UjAyT`#D="b{ų+ʯ:!kJ4Gmt}uC%K7YVfFY .=b?SƕƩȺy چ k5%4m7lqlioZlG+Zz͹mzy]?uuw|"űNwW&e֥ﺱ*|j5kyݭǯg^ykEklD_p߶7Dmo꿻1ml{Mś nLl<9O[$h՛BdҞ@iءG&vVǥ8nRĩ7u\ЭD-u`ֲK³8%yhYѹJº;.! 
zpg_XQKFAǿ=ȼ:ɹ8ʷ6˶5̵5͵6ζ7ϸ9к<Ѿ?DINU\dlvۀ܊ݖޢ)߯6DScs 2F[p(@Xr4Pm8Ww)Km endstream endobj 91 0 obj << /Type /ExtGState /SA false /SM 0.02 /TR2 /Default >> endobj 92 0 obj << /Length 780 /Filter /FlateDecode >> stream Hb``ϟ?ǏW^ݽ{.\Ǐѣ={}:##c׮]/_nll\`ZssѣG===ܹ,>>8::ʮYyyyϞ=۲eU]]ݺu딕SSS_+V011vwwر:::fͺy&Frr2 hMMy"##ئN*))y/^pqq m۶ٳ133Қ3gC6l //^RR"..dɒe˖ۧ_xQPPɓEEEFFFo޼quu-++{޽{o߾hѢO>暛󇆆VTT>~ҥwnhh}I.\)&&6eUUU''k׮ ̝;7))i[n={O~~eXXXOOOPPPeeի/_>|CCC777ܹ3"""--ԩSӦMq㆟_JJ̙3;:: 7o|֭ڐ{xx,^xڵ}}}]]]mmmӧO///_r cbb{Uj endstream endobj 93 0 obj << /Type /XObject /Subtype /Image /Width 337 /Height 142 /BitsPerComponent 8 /ColorSpace 87 0 R /Length 5762 /Filter /FlateDecode >> stream HWCBh̡2\8ACCdp1 6 Q;u4m4iӻuWZ] ,f /ڙy7f޼AJ`ٽxf0yK7v>N u$&nCqqn?;1pΓ|QN@E1 R8v<132"$4}"'7qN%, E *XNb1YI,u"K)O+^ #"TV7f|T+{2 - ~ܨjP?p`*e> ZA6#H]58f* e(cTTRuS+s*M0 ѕd (Ndlzrbl n4-9b M FI'acAo?i.^PSi.QՔ'1MbNݛ`d pN\4mp\)`q$lvBk kh myppGr\$3lL=.ܨb,*ԈrYӄ(L*/$PFj$Ϛ /.* jhMxq>sl^S^vB  )vx'cC2iZ~PRWTyɋ| $/ő%ukB[V۪B$DE3jU;P'5PY`(n;Muq5b%rC@ITgK3Uw_iQ.>X}9U "PI)HT EP5gŝpȣ`spo1R=X1-9z-#\^gʌZ ӑ]b-R&`~ICJ2cUjꍛeO7fSlO#KDkTPo{^qPNsr^t(t=WM#|N)Zrv6GRPI=!&TIـ 9ԕI fƼu^[$cwϜ$Z -ffgsW84O[5qֳSIGyITWgȞ+4jevDkH:AFg1wdE4ǽ&͂\x&G~2s7zrՐMȐdK2Up[t~?3lʕr]]3V{*z ??C+#hbbEnC"ye=̑¨n2)R4WldaDlGIч2B5wxVkd1Nzf&{0-η-*1KmFYw:YlM پpe@3IHaSFeVF/m/[b0GY;ōfn?!0~ &{USo$&W4\iXÂWLe-? Q]_)3cw8xC&-Y,i1vM|E֋aZc|q;E7p}>B8cs*쿧IE}St/(!ˣm]wf@Szwo\n6F-6P{݈c?|@ס: DrPOR|TU9!KOM9K>vY%m}0)"6[g_ʉ<Lj\& Kgulc5W\qr `5{_s/ݽ}TlŏZ_ՖR'y䶼[L f#* :[@TRI[wu?s>_$,Dќ`p)Y9é~իHe|ڿn]S?nh&< M#mhG /iSFhgڴB6烊^xwcԍ\#J>zyU&Utȅ  eݿC4PVyѵY+&&b_Ke]@$zb2@?Xeh8ۂV m i< cY {w-֞E:6ҁD.S2OtRRXE{+j%35g+-׊UawI䁻c֯ SYx{c3ySOM`2Q?),nput++x Z8ТhPDo20tvar {| F@;K!vlPJvg"<_7)nZ d#DP2Dz'*5%:U.OwuqW<+&=[G UсX`ᣟNb^J?4zTBkNm՗[76_:Bn<*T$j6ǧcsm VHΪm aL:??wvw2q|t[h1>4)F;~E~j!\gPzY dE]PrA T10dݸ'8f6;^qDPݾ'd 0*?2AmL c|K&%Y1>#Ӭ;PЗi #oH?eqgvlBpLd5 * 6r7z0nsME Ee5+96MZ#iV4MUc%Q5͋Bgܹwfޥ>wϝg>_eVeͮ}W/n̴D 7;/%): H9@.T>:WD4ցá3)z@>fO{Ak }nAETϠCQ=.p@}TtcMp=u$…A FE=Gh-bԑq)0xڮbcj^=,D@ qw0JpmyP!HڀKдTϊ#\2i&GՌ> Z½Io}g| !D4!hj5dWjboNP.HmPlq}D˭4OZq֌%:@qǺKɐIwЍ%ЯF> ih~9Q9+]^Y]pe~|Rb,l^~9B؁)քhȶ .S!Y7]_bs׆kk|[n;.O5ܶ_kBVp$I^/_[|7tr?z5%HRupM.*lqiPؖZRjS6j1 B` @I}a-n-kzFN3|J LʠAp8L'gqpP€"!5Z$AF"_MfT4Z_Bnu&rH^C"eFdO"ӀfD+J3VQn(9j4oEicZu|.mրR]l26@ijO=]p0ЪǦ/qjob٘z)wZ6&Li0,. t/M8{^#Uj r&m]?/Y*6Jol׈b4Eh#n S$[yl :3,HKrw9h!6;'(:ԮaKAo<@[V[ݜ"Era1ofY9\b#h0lb5qJnp$nѫff@2Q%s:!F3YKөeP|#6re$hЌ,x6Ϩ иTZRW6\`]SIOXOkװ)~qd.p7W-i#;mGep"R 8~ \)pGiUh7p%rh4?:"}7@> `WL~[x r7| "bPMyO{#@wPK*ݯT23*~'L*㴕,7a"l? O,s ±σ"3&;ɭ%ϭ8]Vq,r+כy\re3j5{uZ㫹=:xm*$`cG[11QXۤi; IUt?(U2G'eWNc`衉11\R0̄0D1[vLZmIX%9am ĠV=w72 *= + 38 8t0Gvh`@;ax 6a10De`8;nV@aXivðghG ' Pvp|R1ʌw@68 endstream endobj 1 0 obj << /Type /Page /Parent 74 0 R /Resources 2 0 R /Contents 3 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 2 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R /TT4 64 0 R /TT5 65 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 3 0 obj << /Length 1497 /Filter /FlateDecode >> stream Hn6{?/ <A+HwJՑ[n_?I"ɍ&[ Al%QH^G/c8_8L _iQ!DыA""F/.8Y#2.3N7 OWj"(^ #a d^2x.%>"u=4e]''K>{qqDRf E%> $S0dS $0007erz]Cܼf9ՏuhQ@@34P=P+Q|DZΗUUV7Ti?=r^,{3r@}byqGI9FgR'wZQθksfa&bMgR|2314 *1|sŇ 6dt2M" t)IY&9?<،+KJSO[mKVմ$UDr2C|^5z+"TN_37Mغ޵%ֶ2RwlpXI!Œ7@rKbղ $<1p۰>&Ppa"L̚ :~rh|>on6)@<*@Err墷\rq \fu?wh2 Pڶl]Ƣ9O?  
EHgf TDL f3 a^XJ=Y,ݕudQVN2U;YȋVd+e(o| X\dpv=$NÆr:2T(o/)辂-]şfuV4遚"&MvIM ^@)wuMoJ JoݷRRlnXo_m?yЉ@P# ^ؗC=$:הy?f"yU?`E > endobj 5 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R /TT4 64 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 6 0 obj << /Length 1677 /Filter /FlateDecode >> stream HW]o6}`+2ܢH;CŦm-dHr彔l醶@$"9ҿ櫁 mt !b\#E1IX׃c;|17/BpL6sƖ 1ډ!L<;p؎C#X󿏜t`$1,QŪicv,28s7Nۮ;D OB nrs5{]j,RH̊"XNVg"mKVd fuc%Ȉa{\۱|;>)Q0S;BWh1%^Nb,|Z 5L̾-fiF"R\'Gt 1Lu8JoKZѣ#l&H0 U׹pRQCiUyZ YRbaOx=# D٤65`p2(h'=.c!BD27 _;P$RZJ:JH+d'] Ky"Xe&W`ݬqbAYNxF0rUE }d1gZ|'Hc,udFUwqYTEZxVZ #Pc*8564' fCUL0͵ N[@+x2 L3WRV$[{J԰bj "TjU Mg蔷) [LiѰiuQ1ߊ2=@+Z k&$m̀>f }1%l]9+a% R`P؛"<="r UcF"6"WN%$vEb:҄aOYd |ʅNh aFǨߥ 0A`V\YrjgnI  S4(귶(|sMr!)@i| cǺ,Vgda@'tǴպ.r9߆ӊ^UZPКo4Z^|/UotW 8)K'WsԌH]́"H oO@F|4Uz\i5j:c7|amJkw,y +.z> endobj 8 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R /TT4 64 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 9 0 obj << /Length 779 /Filter /FlateDecode >> stream H|T]o0}ϯoبq|v4R/!ڰ&.qB59 !}ι\νy$)d h`AY0 "koƤT?KIk@#yv+]mc#a[zA>%[5LWkEYOEsۺ*В/|#2өψ{Rd'iLہ,³{w/jFr j*fE+ULѧs{\43l/sO5Vk==O`4g"ceݻZ;.LoL> endobj 11 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R /TT4 64 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 12 0 obj << /Length 1689 /Filter /FlateDecode >> stream HWKDW-#j=rn!,C9h%Gsuȫ !EmzF_.Vm-'OuMGj{\^*z>䫾h7wo|ߨm*HB=/~ѡQ?Λ&j*O6tjZ^eКt0>K%Zy߱\fIcnzc:&(^y+/1k]XŎj8R5&#+=SgE M^ٝzeXz!yuΈo=4ژ0uޒC2@ ~ml{+D2IB*VWn[I岤yq[~V!ۓtbC^Z~kO7~pRA7 է!di`Z,-l˔%Q'/ ?}E4?<`^Ur 9sݎԻDGU&*9Oޭyli`i8mU ;4V^ $; j缫HE`O P^.x_+ Hj5lRٓ b?QnV"ag٣: juk2Mܤ()2ۯ$uVџ7% |CEJoz::X_]x fD:a3;Q0)CQj/ѝi@#^d5 ^U;,Q=džy۫bR2Eͧ̔=rVdv<bmr},QcQ?ѕ$gD`#=#bEQY_2Qv܆ƖP}ve`"ԑvuS@c/c\p l92L2pJEzd,Ig҃XQ8PghSͱ6^@Xhr\MB}y?V,Do|V[v)Zo? ]^!S0nh%R]U \Aw^^s - @$"(@yHtАRTiTPQZ)s1^vWǎM6׷O&X9n3J&)}tA|esfGLOBzTyFF`py]Ltr }PH9G0%Z956q̖J@q: |@X!=6BA9&,6Zð_k\\8ryL.uS#8vȬ0vaj. Q9>;bLky21<яL{ALzF ک'<A);6 ogr^TȘnj^-v7s觀m(Hέ"RvDO0 4'HOAGhΏ3ս!"\%28bi4V/3f@O<2lH f'̓CjiznȞkW-U^+6;vcejO0띞{>μE%O#Eg5Qx|28#6Y0*ѡߒ7${=uu?sh뾞>ᕓC(%5)ѐ0ٽ#Sȝ5Ty w/1­Ko+f\z&|R-|g;3M bpq]34h endstream endobj 13 0 obj << /Type /Page /Parent 74 0 R /Resources 14 0 R /Contents 15 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 14 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 15 0 obj << /Length 639 /Filter /FlateDecode >> stream HtSn0+feAQNAD dR.ICa$˙&m Rh(MXA[,YC',˒CtTiUtD1H\R"ȣaP$-sqv/`+$pw _+h)JkVi͏3f*0'\u%+ɎuI? 
ޟx<>)7U5\@|pRKӂR#d*0&I@ktu6a_xIZ3hǽ~78*p $'*Nt_20w];f_e%46uiAQ8 !g);"s&()izawYu?8H͡Q& u:a?yCOF|nգٙVRC5ߍaCż 9:J`!MMSVwh %eMp3t'#H>7pEyҔf%VѢӸrv̴&Lus3Er=6ՙ(DeFSt/A*0A&Az}}\Ft="1M*a0gu7}`s+ endstream endobj 16 0 obj << /Type /Page /Parent 74 0 R /Resources 17 0 R /Contents 18 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 17 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R /TT4 64 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 18 0 obj << /Length 1405 /Filter /FlateDecode >> stream HVr6)t,cҺizi&f0$$@ AYcO]hc\Ϸju[U)+G'gQC!6Q T6:lcVojGqP5+|Ȓfg&x6hlyFRzcSFzO|\(V\Bβ,xάYf҃Qp=o;7?v`pՁ"X=jG%P98Gb *|huڴhc {=\M`oޑIf?2ȌCw%R~pr '7aaJf0q=驃zzOB'"j©`c=}wNQ\ft髌:7:/uζøꦃ^`&.]X:vyoof=y^?y&7W)%6ycfhݱ°%ڀjw4 Qܽ s HIMgGBL\uJ~Vԗz t}D oX^uAN$HNʼ \4n0q@L`}=-K-FvytRᩤd:C'qbU`IP×Eacwfm~;AW[d4}*YҵGs1*.1*.URɥTr)^%_JeWIRURo#l.vuWODsaaa8Wbܧp]B"b[&†oͤ̓Nu+o:tBЂǛ6w: pݰ4}6$D4 DM f|dr ?Qwaۀ<#9Plڜ=me:MnfD[tQ'K @KdO7࿣Y DGAqV;De<9h{!_t{{Y;N9AFw0.&# h̸]/QU bj'6{QA1*J LJr|q"[9Y1HKqK,XmE%EAu@k2laG'S5EDջ=!GOpWW`-V@PCm%Diz@(n)|pzayOSϕ-պ?KYg{ ,/(L;C*l8emyy}  i% vAJkG #U,3yY" ω*G,—vvh8&oBY 2эPކN<Ogڭ^&ܵ`47yyq˞VȇZЃ =Hi "lw`&c endstream endobj 19 0 obj << /Type /Page /Parent 74 0 R /Resources 20 0 R /Contents 21 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 20 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R /TT4 64 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 21 0 obj << /Length 2035 /Filter /FlateDecode >> stream HWn}WE~ɛw  144E)R qH8: LuYuԵZ͖\zx?wls|͖?{>>ofO3GrV;kّ~Ww.c\7g] 7#7Jޙ3Z ,pSӇ}flFensYԊJ5pu2݋Z[pũ q?HAw=~ZmgGqh8MQs+:&^7v'l!x?\rTCrv] !4o]:ClΒ6$o˧ѝWL!Nn@ۍP7l{7vN8Hۢ˚"MnΡ!T 6/ .C˷1K59o/1gF,wݡQP5ܩPߎ r)HE h$d!kT5ٖṼ^íx3G)̛=gw7jgY>ۮ+kN&9ǒ-BgH1D; 'Ή eQ?`PleL&i(^EwE|$'7j~ڏ\ Q`A$\W+`0[ۿގ5;p<ʊ,Qu 5B}K-G> endobj 23 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 24 0 obj << /Length 1919 /Filter /FlateDecode >> stream HWMsFWj/T7%6d7*3bKb,Z19hJSklv7=wɇ"IADu4K`FyQĴO>|2Zu&nUO>.M7[ɔQFɽ^ꧦ0+/+o3ے>6[b~z2jšYiRNt$ Di PCSd2y)RYZ+' Ĭ,5kWf]ҟ 2 1(ybFwUSɅOyQ4Tomcdv6 y8vw^rc _䨩D4e%!Roiiq坳!tq'v]tm u  "3:)Q'dvqAX Ō \3/tjT[hh'g* ?Jֱ-=rҏ=gi4_;\~P#v^=xa-n\nG[0E!᫴u SEfӳY *Ḑ*ft:m] ә)9Ϝ/ 9A|K* p,IoTzrO'bzE e n cS33\?ǯR^ViWRՓ P}Y75Rj؝XhxT! it6CWf!jjU~+USC\Ү=y9 rsDEEYUT#$s6B(6s K8Ȋ-}M$V̖"!G3222vut'zU ] JIh}8~ɣ[i۪둔Ga\λT$T|MË48mZ vcXnWb6> ̷>IJ7OԵzUAɧj'9ݷ%C?кډH e]%чryvu&FW]?pQ vכ0-@ KA[|ڢ[@B`t6Z'9~<6-xzݴ{t \Y\O%7/ZlM $hjwgm0e^씮Xbqc¯';wZ gP˶-OyȈ .VJIrx曖cϏ Ɵ^ ni,5+TlG{wMv1(C)ȋSu]NT큗V{Itr˜H77tlu̲7t{<ڰFFPNu׻Q|@p kI%u]0t(\?\vWQ &s>0EuCjB5wI{(J ۼ\yZc^Ѧ|gE(EwºVB -ʥsR@(`7XF@I R3R/VEVK_O>9YjΘ0n7KqcqI8. \T2"~*RPG@1&q|Zx?[tGvm޺'RK/įzJ=jt4K-KQ})<7w:Σ"cSE*kCeƑ)]1&sk! - W~]qa#*jE*Kq r g֙~\z|5e>n endstream endobj 25 0 obj << /Type /Page /Parent 74 0 R /Resources 26 0 R /Contents 27 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 26 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 27 0 obj << /Length 1767 /Filter /FlateDecode >> stream HWIoFW C9[']NE,r$f߷ ٲ9[}\/b^B~?E~X矺T DWoRl\ʲP^pPŴãz(vΆžV;)륎;vB~ "@cr ݉.zB+&~lZY m #EӉ$ֳr=giLGXE*jUQ@nK̿pd+.JQ5]6ik}cx Z~812\`R0A)M.v9-\SdG쾫YF%ՉX^a"v,s8NKA܇so=d#`~9gL ͳb́2ǤufJ™"Ǥ2YAIހ?M =&\w`a@[Zۭ+.h<,iۓ)%BhFs .'r* Jd!^-Z7A+8#zh&_ė" "? 
>Dt-oب[Hgu7%y:qRFA.&tFŀ[ 5MGģ9DH K !˝8]\fHشžKjĺ ᨇ~C;9:՗k̘W}vůF 1gAMTydCMgE8w'TňYoݫkɂpMve+MܠV|]8Q\_CiQSèx6!9|!nKz|; $6A7AnrOBi)+n] &n_[_ WYe,q'`hZ2/VE)D'.9-LZ4xmCyF00*v9K/x+UГ8t V`xy(7rJsCXR-deR{ꄱ\bfqV`ϖٗQ:\Ǟ<ړ(5 jTe <W\6( ZZcBXCi;A?Ige6TFw`K4-z3/ǡ4v^`?1eb;*,BJlQ차.G:$7 Է"mХ$^mٝ 5& Ee)p`⨍.>,wQچz,xc? lxx<Ϲ hM>s'&}{Lɲؖ-1%niΫ%,N4[1P;ᬾ3M^ci-Z sBI-v[}>1z?`zSVv? };bXIؒ;n#δ8 -ѦkLF<-/}>ns mDtf Κoò ~Mz1q#dB"Bj0v$m0a2kP9ㅦ>a;9AŇ s)a2!f5 nM[&oa0%|'nOSF>ôbŒCh 橗K)/%-c\piMXS쐛Ȳywqk endstream endobj 28 0 obj << /Type /Page /Parent 76 0 R /Resources 29 0 R /Contents 30 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 29 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 30 0 obj << /Length 1978 /Filter /FlateDecode >> stream HWMoFW4!_oIf3{ 2 l$R ))Ғg[`l5^UZiM{V>d=C|OT_Es?_oCJ2N7RӴK,Wn~JĹ}-ߘƍٮ'wzZ%?rXWK$R$ ʼƥyn-s}ڼ_Cy}vdz B?PnjqOP |)N C^iån&;|PdDwwU퓒sbj-$ԩ5NH!&qd44!CeCiCQ䈻"RCq$6g>Qd ^!-p(:oJ'f~{v>pyuZeq)ֹ[wz!I[oV^*QX)tkF =Cnۿ- f9}:mSRUu?^w^?!.xd^/P,@s,KC2.r^4DW)^c3t~s5^N s23q^6hHp;Bs Z,U7C }X%QUW'(qhH0mɣSU)Q^5T?1Kkֆ<]lA6Qv@!~D|> endobj 32 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 33 0 obj << /Length 1731 /Filter /FlateDecode >> stream HWIo6W*"E:[;= 9$Xڒ\-qʖg&MoE&z-L OdO u\%\U:n"bur}Ջ}$V=qEjWzb/lx@,~q'jcف*,ɡ|V$򡹃ۧktps˜oa~hDbhυ+`3N@d r Zd.k\ucu[+/gN,# Rb-iGk-M`N kQqAݓǔXVjюnDd%k7K$rc %8WTg]1vmUC>݊/wVC$l,X Rs wl1_b[5jg /!>Di& OsDKmEՈH2s`aHa/ @!e"+;XH^<_QQ 7K%\I{˙9LK'Yꗼ7'rGol-B_NhYx}MhuHxB꺔hb(5Ay1h e>-8"~ Qk$CكsS(L r{-CޞPͫԈ 8LU(lWH2h!P^PzleEB֘cNe`z1ꪩyySpN)> endobj 35 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 36 0 obj << /Length 1871 /Filter /FlateDecode >> stream HWMoFW,C$EJ5MҦ9؁@Ɇ*?$Ғ%AO- X%wv͛7SZ-:\h>sܹ|U= w\:tR#\VLc3-II7Nefs@ OdeN>_>1_BYI^ԙzO[[0˷Vde MeD{O2<^ES| wt"04xJ5L+ }46eib}6xu{ ٱE9¾uunxL; 2ukZ, *T0.V낐=lܟ[[NBOSԛWmo69EhE+&}qyw=w" ͉_fPI\&CEV܃ԅb6}m^|}lp %h{xzщqs=%!K(P§6c؈Hnw1%=[NNT UC <0R[o`\%Nehig\l{KlĈ'i+3vFc"%cUP붩6epQ\8qc94h!#ɱ`sx4զIUV;cW۸˯F}år.ڡ^gir9W_jJz߃IGCXU-;E:K?Jb3Df-2pͬ馌/[d"BcDqz\a= nlvTR|()ti/1dy|¹)L;^=El q}<@rJ*#Ac$=Jym5g1yf8 ۣ(9^"piGT1 (}+|#5vs)+Avk x$ȟ)e=Ն.mjC GKٺBsFuov4+f4}Q(soqrn4%8?+@0B^p-6="o\qeo):w.KoNs*(.z =xYTGIL)/v c1!*f(Pn۷2rs~.H]s=݋wq}C]=[=Zܞde Ag /:y'fQXOC ypԗJ-L.{=p[Mj2X]1i  \ۉv/7;3M{ Z?ջl\S#ɵkLg΃Սk4BU2j\ڼb,J(oPSٌV6TWI<2q[A)@`rgc8}YGtTzbÓ#-.JAծ(K6`vXz5 U{l5 ϝY,מtGEIۡnw>稠NQơu G8W0$Ũ羞ޕMÜ_> endobj 38 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 39 0 obj << /Length 1789 /Filter /FlateDecode >> stream HWr8+PCTDܩ$lG[U98.MA""p^@d4qUait~]/Pb)v\vby'S=hjvvd9@,_fRXsUv(BﴰZ5/ H>mn?#ʎ ˿g߻qk[ŦiQ"'׎W^rhNv ̂HkS?(]QV([E%n?!D쨲Td'riE|~(npZlL;+mVr \3UF&CYm745Fm&CĦi_i) vQ\7p튏Wms;hJ,ױL_QMG!D||E-N%|$l QvG۱+$^74r|Wv9v4l!B2` J(VZ[s ~dl94ܗ)x5}Y} Ѣ*-UsvxS.s`W4Y&ɵ!&I H!"z ^̳h&^3fs[)_P>A‹z`HLPAZ*‰@ M @ w|FNc p,4'2oL#HJhM$g(>nZp[F)Ϋ[ `7( c9).n4s;vU2 Mx*~?kʫ竁nWTjUs9Β+jIq<ÊC{ڍŇa5CWjP|&bR߉k= 6> endobj 41 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 42 0 obj << /Length 2040 /Filter /FlateDecode >> stream HWKsFWLeD -(ł!1+`!J~%{M,ӏe3l"n~d~"(XM5KT@uy=S,P|6DZ ZiE+oE•֛ҨֶWRB+Dw|U#b, ԅrPժ񞃭 Y RgnNdz>پT??lj4<{1< $.3x-oO [u6ҥkJ0Ey۬R"?ԸTH?{ ^|=p?u%Jjx\6z}BpUNDwdྮ7p ozgdUթSiRٓW'n0{AbV*fSN3bo:;_]0'26:U{x{h Xe_(5N&T8uph[-8&%#s_zq}˥|T4I|eǝziܡVg 2~c3[t.p Ww卲:ArO$XAnkZ loڬ re{׋s؜^lND=⼩L7pwvDrz}NIC(Cgvj0~l ; 
!zrw6N3^0VX/S qH>Ȕ>[p%)(f̩DCK5> d9F`2o/|~fRgzo"{LJ~⤵o&0X{6W7I~1&z[`Kiw[*Bp]X /S<2y*MXi.' ^l7_Bh Dl'Tߟ~0ҏ _ԇOF1TuƦ63!&Lak $Ԯ [VqQ21B0Ӗ{Dq,I_JRa9x{ZVy]9_JiD<ٌ#0:4>޽;>f S3nQ Fh$ <ac!c[g,/-l,ߺ``-[/X=2GL򍉬}W7y˲ޤy/XbWS`M% 7Bm%5 ?-fB<p 1!w]SbK*#b_ q ˝tsS~b(c  N)oR5C/ )InZ*p^ ǃH.[3>A i8{ mgśt7g'ithZ}~쫳<ˍJr0ɝ% L%m#I[[ W˓:LH):}_~&){JI0y>/jPd3 3zC},8}N\?,qĵe_*hy NctU5?yPWeC %cZ]::+N3foGlo endstream endobj 43 0 obj << /Type /Page /Parent 76 0 R /Resources 44 0 R /Contents 45 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 44 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 45 0 obj << /Length 1455 /Filter /FlateDecode >> stream HWYoF~ׯX%¢<%!(DI>Mv Rd-c)Q$r9f9]ű+rl|iBDc㉸]}Bvt]ZϮUbWt6G7N k\[h+Y[p"/(맡6BB9Y _CŇ4 B‶Κ{WLa"ӹaC+`)!zclEhէ 6/*g;rpvRVCAӰDVStC6Z=nMpg}ffAr뗤l $ 5Aћ[ષ.D_dN" K!e@T.D皠Se.sN$V*$0o(E^j,A%WRZ)JaKE`v^]/aa7TT&yq `쩭Y;a,HٗTY:P=9-N$ =s PF#!1) |6eX2 sj X;V]-;Z%\[ ;pCS[wL=gt치'EbHzL` P!י}ls `X-(T2a?OGT\w;NGxM1u)Ҁ1fu$#bzQF_FNX0Q$K]ߖ[胅+ #xFis8RuaX&ž_PB"e{oMh݋r3C!S) ]I f7O(4a̳#pk !? ^)6b@gBR~$D' &P;1'K*Z7P"nt^_ Wǔv{JKWl8aآoh` X`F|i׌c>Xu#uEC 'CYE `#!]̥_z^|.tq\GY1NR&p@@>-aܨ/"+|\`E3Yַ1. >V9h۽'-1["t }9v܋* r0rp&pڷ _7x ~c?C/@-샻bGzWx Q endstream endobj 46 0 obj << /Type /Page /Parent 76 0 R /Resources 47 0 R /Contents 48 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 47 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 48 0 obj << /Length 1881 /Filter /FlateDecode >> stream HWnF}W pӂ@>؁@#594D.Cjsors//G7˥'XnFjN<1?d&/C=$ftg%^LJWr7MݙU;4_NNdՊV+gRO.oGcrΦ;{6+'39Kt>Yt˫M^ث'vhȃ7xCf`t|tƾif_ 흙;AǀT ʝΣ'@?t9c?J1a.3LR$G|iCfZUUϷf !Γf!LWu-0XEnDiln><6_k{t$y αc^N;v-Ctxz6Ui!@KaX8j -!5H9k2BIX@(b0~kFT|[ mnhײ~T ok'Tpg3L_Ȥ mQl3V/Tj -qF7h{pD f2KhM% u$S]ݸgD<ۖQ_ۭfc9B%ɩy9eXдZvZ? %BQVK8% ٗq=9pE!qkL﵎"ɛ)J29$Ǻ+roY-|:`Y$x@S6/s,.#q9؛_p Θ67O]OOQ@WVĆS:@p.]ۃŦEJ&qt 8j-mŶ84gJko(ߣ CڐT_)8J6[XY=0LMOP Oe<%(G~;6p%Ѧ P>^S=F;< ז%,ߍ ١mYz3 8:SuL @Վ{^C,s)7tqQ셽 X0>zӆRJPuގ=q<jJU `: KXBB.h,T'+{Xu%c?|K1ϛ gV]d6PwwcR}LAENwM * " 5VH-+4ȓ0Hi/DǟT@nN#tvQ`o2@}"Cɻ[x(Cr#gS^7zҙbT5 WfsU$χJū$NazU3n>฽Ys|֯n-. 
nAה2w~b#H_WԱ kPHtl:đ~.ѫ4|/ElRʄ;MЉcg(ވh*P]z&[+839d&?s~(!i4b0ܥ\vyy l|ݵyehr'M<F?_Y/|y/曼nZ[tK 477t v9t1|D?T^0ǜה&y5>b5kj#$:V ag%#UͪiaǙ7콼HԬw2e'9&{24ٵ[@(8@ZhNj3߮aMJQ0ɾm`{GA秸qm{'0l F?{3wzW-T8k 8(D탬spB:TX`!İtP>rWt˿;w endstream endobj 49 0 obj << /Type /Page /Parent 76 0 R /Resources 50 0 R /Contents 51 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 50 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 51 0 obj << /Length 1612 /Filter /FlateDecode >> stream HWMoH W QQ+$;mnǍnia(8hd)vј!9#)]i|ngz_V* =7PUO5]u뫇f=*Pi>T8 'ns1 r~cwUP gx;{TWybnyA@t.;꺲`U*kl;?ŠW/'BM?!H߇]9'Z՝'rC  .8Cć?S`{[MڝU#(ny$>| |sWg2;e68Fm!͗[zSԆmU?<_/vB#H$]k{0Xy$к͎wjpxa/M}1\"_()ekZ>Lw-H/|EsPsV’j*zF9PBnnSoZ'An\|:EK"Ֆa (NJuJ^yCەeڮ-/qZ۵@8N6϶uOgOdEOJ*5/TgF*%wiǛrS>5AkihzcHhh>cR .ҕ$XVKBX}*꣩7)+-oj15,RJ,eD€3BxƐ?L T">TI"kW {h"NDcQ3D(/VrC^4l5mxd n\Kݚډj]$&oxkHKBJI]K}a?ƈ0Xf xF8{9ZRKnBޗp3֦q1ǶfDJY#t6&uRg-7yTJJULӕVtWwͻʏ٩е 0FdDp~gv$ ߿g )$i[#NFܯ'^\jO}A{W\pz-5 $8֥ Ox^EeQtfp Կ9>|Rk ?)IKy7vCQ=T)3hwQ6ۋ|z`tͫ!rWCw}*8\KUK󔕌TAHdքƄ9}i>4DVBh&p'<>gE?")EٕH3o HzВ-ˬڂnF&M[Ӎk :ybN0)R¯jQ=On> ܕM{ua`O^[,i!GfL+B2 }4>Ѿ91P>ʠX bdpYtqH)+WR [ZzYp=L6 |/~쎵[|m#>6E,_ɜL\J`J:^$]%ЫbUaE8Q*YSg@mTgbOʀ_MS?tO?a'i<]Og]ۙAn|Ng % endstream endobj 52 0 obj << /Type /Page /Parent 76 0 R /Resources 53 0 R /Contents 54 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 53 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 54 0 obj << /Length 1656 /Filter /FlateDecode >> stream HWKsHWLy/3 mc'Z7'EaYx߯eǶ3ov7"P jyZ /ږv,TU}ujL+˹Rmfs[Kڽ[ѵyUW6ꪲ:VmoB?WVy:zoU}QքBfO3cᓪ7Pss'ܱI{es\];v~Z΋~͓ʲl/Hf}gwA4kh|nu02>q ͑s3TNR*UBh:6N{GxM 7ܚ%Bof!TܫCQA< E2^ &lG Glzǟ_ծnʴs%$cUM WBŰs}]S"U_Z5IK*^.?DW{dTu&XxNIM>%d-=6r`B+N\TI>;tl(Lt=3۴'A<͓hh,pˆɲ [r424{BVf n 8I2Wn)1 Ȝ jC>X# 6)'0[^J)ֲTP)[(U-ҎWo-OE@-($ܸ‚iP"f_4 {l`JԐ {VղV6K숶OL=osmtgXx@SK^4=KX7WjSJڪZω.)lOx?Jf/g[k풥EBp ^q9ŕ FSI9NH=eB]و%O ҉ɦ;CџV܋ZEt)1i ㅚ5AR!'Pp~Ԗ)1w圁G&HNZy }vP?b`D/b@,zҩЄN߬Y衁&ʉz~4ooR]h;8W伴kl/3-bjFS1QD%Xtjn/B~A)I\+}3>+Y6~09a.$1I=0pb:ou3,cZp@R\$@ :*%37/YMk&8h𬸰^Nns0b>|_$7NABr-^ʢ*ʾDKP輏O IP.'2m0wxop,E>.rI ߨේɮhlQc?|;3bNQ/ r'rm4g<+k w&{f+ ѕzSdG&~n4S1v,+7Ɩrб裂 99we>HD4c)%%P4RT3[=_ Vq4-> endobj 56 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R /TT4 64 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 57 0 obj << /Length 1983 /Filter /FlateDecode >> stream HW[o~_QKE<{&ʲ{gmӸ=;>Bn٬Y2tQu{mVf4aD &42 s'b֝eGbѕ*:b׭88rEo 7Od]Wziv[uoA e &סR & (^h!On~ۺ[? ׈r0ۊ~v/JZ)-T>^(A ub%F~#-zn뻯D DA/VPVf{B6VNQ/Axs_wye^ ;[݉[$/oy рƻ/m,wd agr(g0Q$wphߎ<&S:T;VN"(A/UswlȼPfR߂39=0$M ,:uʂNȐ(W0ݡ^w"? 
SćjwRļԩ Q5fed":F̎ iu:ۼT& b՘lˀ)75ggnJai*F]R#S`=hçn(uE7Ifiṋg_L;uW"qUyOR\,3 cbA^G fg)y/} jGUCy?2ځn fd5ݷ㌬=!N*YMldSGVdӁldlȦL>_6uOK?#O!WPKP!jwxe@+pz9}ϕWlzO ^g7dLFdQ@&ɼQѧ:MQJ`\%*$ |$!׾Ċ+L%ZOvVF`Q A/>j'kٯtP̳Nx/p%g"-Y,K8OX\[.JF>1O='y"d- ğ؃Xr%O=:plM!XKI3P@v!Q뢕5 nSr_lk\ F6,[8#"HazhĂ~ӇсƵ33P cƼ~eٞ'z,… <*6d]W*!,fF~[®Rg U,YSwW`-BT'3!7,ʶwLU*w_`lUe_5@*P0tVM~T9*gS#B@ z>Vq-,9.FUY8̀>mlnu^ Ӳ35$Uv,;[VF(XzqqEJ+H8dʸi}=1H &\ӓ*N[ k: H) XKɦygyvruE!nJ3;ș2uf̑$Ws.%s\6gw+2߻= N $A$>pEv432Ѻ; t.oNFu [zc#Bx>ت ]>7}r`yE:X&0X r(m[+D 6]{.QXi`I1'Ŝ nx'vnc2=8JaiV< n!wo['_Ҁ5&!Uj\vύ~}QuUCǗ-ؔ |c sfğHя;ycc=@`Cum:Yԕ;2(2 ee@Sa/G9ӦIO]fiFCb;/(f h}׍x?nVZ endstream endobj 58 0 obj << /Type /Page /Parent 77 0 R /Resources 59 0 R /Contents 60 0 R /MediaBox [ 0 0 595 842 ] /CropBox [ 0 0 595 842 ] /Rotate 0 >> endobj 59 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R /TT4 64 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 60 0 obj << /Length 2007 /Filter /FlateDecode >> stream HWm~ba8Q|4bI8ESnPޑW$Hwgfsq|r癙g}Zב\/P*vJCT{Q%Dd@GXO-|7MWZdq_(1""`bXwNy]a~u vlym0]'{-8[#bT,6X@Y" P0<~߹9Ϣ4C>}"8"yX,b)>[K1,\؏$R$}/G;AbxzL&э6ڙm!Y\|nǎ4ErɢhԐ?:X>3}>Vƺa g$kC<~If<}8%;d[7xɎ$DHOek..O< nrYBIEIF3“T,X'8 !4rdɴˑm^/=mǘ(A~ڋgЧ4-Q\Xi?ʘŴ/PEDӴ\|t2ofش6;$y'jQϰVM"_id7#ZH,#f=#BYv'BK~lпZmzG_Dm7҉Ƒx4ϼ¨RZtۉP>a 6GQ~sϷ \M_Et!bʉLEdBߖ$o-SWhj W?lRzyiK5,'! VFˑ4{3k MHD #IEo T A6U ?|g}UrXlKsyW@S & )xE(> endobj 62 0 obj << /ProcSet [ /PDF /Text ] /Font << /TT2 85 0 R >> /ExtGState << /GS1 91 0 R >> /ColorSpace << /Cs6 84 0 R >> >> endobj 63 0 obj << /Length 403 /Filter /FlateDecode >> stream HdQk0_1,}H:&z\ ^J)f5~PlmB&|,:zǔD/TEM*1Id z. Y돂ֱRMʄ ם/a hD e9fjl}Џ -g7)ܔ ޵_=i~ڛU'_V`&K o> endobj 65 0 obj << /Type /Font /Subtype /Type0 /BaseFont /IEBFOI+Batang /Encoding /Identity-H /DescendantFonts [ 70 0 R ] /ToUnicode 71 0 R >> endobj 66 0 obj << /Type /FontDescriptor /Ascent 905 /CapHeight 0 /Descent -211 /Flags 32 /FontBBox [ -628 -376 2000 1010 ] /FontName /IEBFGK+Arial,Bold /ItalicAngle 0 /StemV 144 /XHeight 515 /FontFile2 67 0 R >> endobj 67 0 obj << /Filter /FlateDecode /Length 20071 /Length1 37348 >> stream H|U TTe~~w;+0&? *BI%+⺍涫jO{Q6R=vVINkwFsuw{{P Mzܢ2wšSWZ]TU}Ȋ|v0VLY=@P4~̹>1H *)v{nb}GfD̋0`{UYkJ]E13ˋT5Pٓ2:ZY^.+v~e!oEJk+[=IdruFƅ^_qWY[g;3J$ ;6=Y(9vHJ@OtÓ!2IXB* U7vRϗ|e'Q+EF#cuyd ˱{Z֗n47eC=(2J$P71%Q +f`!^'S-(DLQl)0 q"(O=5..HdLBh CYLn|W$7|Ì7C芝LA5U}}x!'#vb#EQgˇ;i?-,bDv }كDZZp")~ES'UJrJ|mC/Q%6=1 ?Fy4]\d\ jkD4bQñ};ЈqpJZG:5Aċ1B6%WW~2SΐYbooi4s턱dsDsUld Li2@+hf^"``9SX.V>g[qSQxNѕ]'72\&>2EIUG&uzHm3< es.h{/}t jog-g!(H4FS4ѫ:.{>3cOEpbQ'E#=#qZ滷Ni-w.y-紘pMJԋRZ*uYmTL):>Wq878Mc) }ȢTp&ϼlR[{d 3k~3v0S7e(MT ִVc:o8F炃&[x yxy:L;4 1P\#M*?`ZzQtb?ѕƠoc8 d 1l /SS'ٞ&ijc5KDNa!;MTe4=ȑ  1+a]sĴ%3XrHfܕ̸/IZ:ғӦ6mM[[zYej 3ġҜzvUrYd;;2ّ)=V@QC)@(PNbsJ/ۣ-p:bdNYE:lzftSnJ``o.\+BIlסGU_Gd,z4V:K5]Mc <^.o6^A5@lR{6㚮lt&ƫc\F bw|ͪq ÒY>I7w؆Ng* FY#pAZH lS>8ꊟ}-ZQ6P RjH>FC@[1u7R MBۥt`SX: !:B;l(NN~9sϽot -fU) /Ê ( yXa=.8\EIjpvvxxY5Ee}ʄuuvIU^.ҟ  P-*ᆒ]Qm! fWD]Kr[\K̽&&~'=;)=VRym/*T}B5Nnʇl)G4)ic3$5R*_a[3_~SsXg/)9QgndHC"S-* qB~_a&$b5}^S=W.hjJ n$S@VvƒXVK>rN+PZ泅ٯL|d+-JĬ />.kDMQ#):Ǚ۴M^*>u" НI߰#uglj ggzyx$=|n19G1R|bWA9kGK>';d.2G% x[7HBOyh>O)Y"ꠅ̻X5_)вA~/vz2iȐ-xh{7izqb14͢ByxlK懲&3JGA0vnF%~zFQI6dihC꼶TSR^S4f 2PXzFͥ Z1x7;C"iT}" zMRp QZ&2By4|j})YTFʹAn'Ja4YAc~..x9XN7OΓl''2lyDmv`z (+;. 
@Wh@ zXQO '@^zV71~B W8 a%ku#}<VW|]L3j6~= 9+PNnϭAemd⤏%?oXߧo璘HÛܴ)G5`uJGw}'2U+%-Q,[O\[xsh+U !0N`H'wپW^t˰o`c E{6yqs /?k)/%pg#u8%|ч5X#Ĕ˪[Əx}ORS:_?U缋!%kWimmeȓ=uCSMOn LJɽMcwntp~Fe=ޱ޹_#?Þe*]j3w٣oߧri/i<XT{-g>*%rWgVRnt`rZCtF&;Kc#֑VՌ<7IX?C(f(}눥{euAw4\\Ň3$Cm۷gH<<+ϹJںqXnswye7_>,B8rڽ*lF=G$1b/,A2 1T?tUm؃8I!lBU^mJCL Ǣ'S~ 8~?AjT?ʹQ&^̉;5jCvm9C`8Zwp5s cO+;a{VQzA5}WHI_&V>fELOh{#^o$z{TV.[@Z6V.ܵF|B=-n& AFn9Gn4Ԍ5 Ý1GH&Ihy\mcTf0yյZ3]JUk! 1t~0>8σU6} Hؕ5ŹspH4eJ\a99:}9"u+z!ri<̰_cd9[$O,#{.9 9_IW|Ydɬip2up~$o,i/},$gf e foYke,[6mWH{&RC,iU-kqTepϣ=~FQb?X.KKI$C̱$K7)CSFĘ26AV`e#L(lWĊ~wҔő&T ܺ{](BQ4 @#QYuecHHHH(""""z$888" ⊐+"""""b b b0A L"L& TDDDDH!!!!E@@a0@ E C"< < < <$WDDDD^yyyvxo)$$$$j0>c04I6 6 6 6ج Z ,K Ra)"""""R$rSÞ:6W6B[ʏKʟ%3ʟ!SʏʏäE9S>HNmqGXzǠ$4 -B]fzImZ[VLky+z+&+++VLW+m`5jB^T0k6*:NdkAz1Ht:H_ h%{JgCiY%.A@V٫ky Ek5Wh :E0!Qlr\}!A^ju+usЩ5R>'ܼ2v8 tdDbNçmq헶E'l\(.9-}-#hفY;q+pCm(>`M*k$ 'VvսtF~5Uld\ kVWQ9*haqKY1-/D{Fz-v\%FDH _kbu[|U,nNϊ"o]{XKۍ/ 7a|dJ3]ג>mMk֚ZVV֫t]]:Ӊ^)6϶9XK.0"AJyXxٳXIǞ z+֊Ԫ!=}ۭ-m=ۊXZl_ jJ3 /6Xu;pl#;zr`OwVnݵcmw{s8m8/M痋[ft`5 tg&ڴ`*芴"5ݴ@Q: MtZ4:-ll1T='s=޽6mY%c%ӖI81)'Ŭʤ>bJ>D`3 `Uӷ AR!5mKdFbtt=GW:dQ6YŗǙ8tř8ScnCzԒi}P"#p˛i2_&(gPQ0}-9 &WǙCiQd_uM3";D-b\C{[4smyKQrwnx݋D=^]}}}= Ԗ[іklLdᆬ> $ԱVAY@taBSy?qpMH"`KHMCNohi(Ԫ+WohA t٧tfC%"ٛaYLk劦x5Lk|/x;,ڣM߻dB* `Ҡ!O) !rN#1ՂxQyz'{8 8oDhj>bЖoC ( Уnʣ[C'Yoͷd^WJNx&g^FpJXsc呒=L̬~w_7Mȱzc@5Gja{Ō"U CYK+'4иʀG,orpCm?J_(?ԏK'_7wדy n֑W9s;A/`?4>W#GD;Ľ_EݬuF+hNzD:-#}(0E$"I@(IcMQ+aT$BHPI"QD) @D NS{bڋ 5@IKů=ۙ@&WA-ҩ#A7by٭qHZ*١&=ئV! QYPz-ةGDaܸ. J8 ]~ٜ;:'UϡV}l_ ;}}aJGcU аGgq}ȍkʵbh!1ijy;h܌ω/7qو'8^\fp+rЍU=jCCɁfcCgS9/n&~vi>bJb0e}WN =\3 3udA$Iĺf3|Q|'q Եe~Z?úbQ́oͯ1⏝HMkn8Oz[~ev Ο:%Jr=V#efMzi꫃bpt?wќB[[=nzEË{֓ȒKz]^p\G%{.)Jh6d4qbDD4.lx=^RHc"+Qh_mMD_,_ -DюŇ|úgݸ ;Wħ(}wF< <+=dS%"KqjrI! ILLWsDt@аPh< Ⱥ"#'+E*sPX%c_ig(zcեRo}-8c]MV38ywv]w0\LIz9D0!"aFTNZOS8~Þ 58 xGLH B+| ''yV<ESw!|r?޵Nޱw]Pfr4T/d C0Rn,sC-P?-x~CMX}~igg.V.?7/˽j76٥>nʸaIrmٶŻ9@Eнz ZEa4u"]#vMsfC:<".bls1Uy2VVO`ˣGX|S\_]A_9?n˺oݟ>V~'skI& #Wy$5$GL24>BBd YKn`1%L>i;?/[-zsuD6mGQ{o ԱY?Gbk ,?{H8,\aijؽEq9xX=ڰTUBP!e!Ha IŘ01I Ws_i‡He2o*  )x`Б0֮UJnNUn{]h99BHTzC7Fe.lە6,.ǫtR7d\+L|y>}Y|HGOtvH,*nT*lBMjD&?bylO#ߥC'eQTzU|,).!Ǥ|A# F{pY dJkb*V2Mئ@*9 YZwIR#Fɧ7)V Ն8S gC\MEX>j/ dc1-O-/r\6iD\(^˧0UHZ``*0Ų 6r$RNY%7W>)77q5P'~fwv;I)*}A-Bjkql[yZ6I%[}n䪶ӒjTRh1%`GP48Uvp ֙`s/Liӯ L2yO{RS!3PpR<_s`EkXMijB9r҉}BGcOܽϜ]ۇ3"?"zQpp9=ݞP0Q5OtrE7YG=(k)G&kue[Q<)< ӚZ:Eax< 8/6:ٸ3U]jfM-{gϿw1{oČؼ_M Џo?@cӅ$7QM+5B!$9=a,e-._\:9#*dv6ܴդl}qR?izId\ׯt!Q)FW'N'bV']+:m("tF78DD̖j n Ӡ FٍV%BP#zV$"G`CwAA3XEu's4_ļ`֘35͐AQ5~7+7e}:C ?8?!=`w#ޝYb$snw$5 O %?+eZq^m9,@ XGY[ HGY\1E=m6~yƶ wG>dRm-.[r8u;ՋfelV*װOt^i/z_-C61a{ø\)),3mw;=owGh^0/+m`VKYZF,6jxD+ڵ0ND)QzBKN j蜱^2;}N9zgd 7t{(ۆy+XQE%.py/ sĘ']J=rGb9iQp>I\6gU:F_5{\L^(*P;-MjE`<]ѵ1  \B|F E(&Oc $@n$_$مx@l$_`AUOw#[RjzZNM&㡾1Ӑv9̊f=K2;4$Gq,E#ᅨ&ߡhl>!d2 c$VV~4OB"y GC;!CbRDxbհ);QvI&8X%g ,wFXW}˲w{q:>%"RF\r!ImהN,^E퟼3Cy-|;!q!\4~)"]N+]jp4E9f`c}.K0%)3v1 5 AH X\>m|EԃLO~UOfLntgUÿY1{Ӈ!{cjK R:AMQU$@FшWPCej~K-#ץ^%RB7ܐobӁKZvH"1JQQ]T8NjksS>geRD4!BT`)a:"p:GG!(B%B4țEf2 $,@fnbsDW AMjF5A@oܼ٘MCoH)%}Bϊ⿺ZrȳP 챦i6-U-?hpWE%9t|'QrE)LucDx3YvsLM:"e٬U]t$R5A!'TVS}[Sܶ[;zVQ)d2&O> -'}/~cҹ<3l>&yW?ysT,?)kj!r/|s6Z|P7$QB%¤+wclNd^()@ͪ e>M{T\‡tX$'l`PBsULVX'`sS9GPq2Hj&N > |B8<-O;ogGRhL qnﳠJ>|Sςb93r8[` !^c" ?D~C'ۯeQHesuRjo"g, E~es]݋7 h19k UЅJ?Mw O_4HMAU2PȠ|"ua~w~ E!&˔O!ڒ|(l&zK:cLޤn6Y=3Ա\Xv4c$ hLGm)x. 
$/4=9%""fK_=T'طzhmK?Pj/1vnqD-ې#TR{v.DEdJ BudBN%K˗Q'dYRo.$J TC4 (p>*(*qcemA fN\O 8d+=<`yyݕ(%j>}F'GVr7'Unt+&?$ܼǁ/R2?8^qo`duy[,g 1zhrWM8T~ _w!?>ekVmm(#zew_3/#=>Aiub2*@5 9h $pk Bd WTD2.j> D  t4𦁫/f;8>wvCZ%x@>B #KD!؀tccԦm-ʨm"TUkJCiꦢV6H $V8sBaSg޻{` bHۭ[ *c2zr}u>&SM\{+9|9DxySI^JN&D:~dae=QU0wQipX7p\n]۲tY0]jH/ w¨=݅XI'W\R j+᪃2ƻrˠHsm[0%O8nt PLg+ mR:RnQaߧXO[e܁ DLt g:*36س8(t)Z?mf[ȶԭ+d۪ 'goÞ({'U{`zncAu I 4|d;x]pp412?]: =zT[I[Srkz1B.zmײjO0^o]~ "k)--}f#-ȑ---@?9 1򯶿qS)>k#9Ny*"fȐ3JcJdFX/7@#L՛ ,e0z.xl )ɋ[Py9V[s/NCn]%˼krbΛ Skŵg!n a? b2%+o@ig7XIΜ`<뭼]]|d嫕#UXo#]+Sc6ŏɸ(* nSJW*T( R<2[=L)'/y}ARx8|(Qebh!(;^e9BjT#^Q jBoRZ򅫵34Nhs`'+ <TA!'ZA^{_c ҴJ{Qeq37ˀF&jBt;Bӈ"F툶fz0!s=A6z&q~(&bfk3ZM< R"Jr@fvIS;S7[K\v57Sv5UJFaK)v§uv.so/] h \ ÷do9}Coo,PE >dpw@2ViJFm\-'.tDmtyXGmzgQujcs"\]= n9yW/Cl/ܱȋSF'3YN%u$]˝-.6#G(WFD(@q8.BRF R}އ0d>xQMը Pf+R//ʯT%P$O6BvrZzxhAL4N'义e-uoZ SZB1VN4Rekqou=Ri}T8tfzѻR|@Mഩ[Bΰapc BoruyN(S pMYyAxqW $ ZR \ȔxA#I=p"Ë1-"-*v`Bȴdt#Mѿ ~FuoΙssvzw!f^lo%P4nH IRܦ8R"JIRUTUV_ZTjbLl(Ԧm^ P %Rˤn9giȍTU}8ٳg<9TBTv.QlepR#N5?COΧp+1z/ a<7ttvt2W9ru7 Y VѷE.ܭdY]fBRiQ-c["8ZЛZ҇!}Z֭\A2͹-[pg2<[ipfyNk"lEkʠ6d [1Ok֤8L,ZgŔsW`] M7Mqù|I[6bivUEBĚ^4!5 q%Rs,O$خlAdI;z}Zv:r^6N~:PwTM؆eah2>A +a sȒO&PYx6ҡKY?d#L\Τ)?໿vGx/DߨGޒ~j8vC%ZLD ^oY3Ԯ]Dd|嗭UU6VyE;^KT^ܚR%~Uy|̡=ebx+Ev-!42E&ɚ# :+3tgZj MlIMTVKQr7%MgCXεOS"+]QVO^])ʺZr51Gz4jngxoT[6_p(Db^M)ϐ?kO"#67rr; Be5W߹~3?4%TXuG8藛Býv b_vOCy2h_0>C1`ONO@8U/>;@t_> endobj 69 0 obj << /Filter /FlateDecode /Length 33662 /Length1 321284 >> stream HV p=nB^$1OKѡ!@% D 0j)R- VRjՊRF)2j"*eZƱJg}-3ݽ{޻{_ b֛L9[y#s%/X}}էG f܂^)\ 7u}cU;%jCuTG˲M[N麩oHFPHߗo#Z zRSMUhBFu&ڱV'h6aيuQhdzZZG''[HR唎Q}UF9lF>nScD> =&I҄AHsO皏tP_Hԇ@v<ɖW-2@W:=tuvz=;gmݠu~~]-وջFBou􂼩/r얧䠾]&AO}Z׭x`b}+qi. .Z&fB#|X YyQ_ yV'4Q嘦+o䂉ֹ:[ݺE2W[$_h.n}M;v2z`:0 hTԢ^VI4HLR!v鐕*K;eM^\2SjeIb[:e̖elrK:yP^NӲFK&K,^ig\J܋O: ^|pvcoV<%b>Kdc%vO,N.I"eE V"V)\`Ddn*̑BimzlM&i^u: TUVbDKKEaCrs2d N401!eTP2aڸҒ'RkH5dҒ:;W Vyv~gSEB)v'''PLD E'Rx"6r~CE:tE'.(D Յ< E\/z-iy:7g޷@h|obC} aYBUD#;10r~ nk_} sGd-iW\ˬW,ۺ {."l|D=X \\zD/s!DF;Y^p  9۲籄lX_l7Ϸ1QBV2N̕>N l\i[yN=5ǓHا=cAopu9f?j|kH7gܿ/yo|~8w9WμfoFmcn2M?֤'\cStbw-z'RwS}ǽ2 ̽ͫVN/}s}ʳ!8=7S86͇˓Nt~go#|yP;aﯡN bϑnz7w%-cǟFy'/ށFĨ͇AT{XqllY2L<9AbbsWM}Vyss[h BiKii 孷&1 2挛.f`\ Y 86c+ 3L,נN0?NYɜ&nư/~ᶔ%~07=y{vh\co[M̋="ڏO'K|X.kr*,I2+ѧ{g17ƉO?=V"u0luSnEOOji1 ܕ1(9; Zȼv^71,wg[iw-oy`O:R/+!iJ;Ss:(-C<5Z&fm"} {>d {b n)9|gm:j4iӽȇTm0&,KN_28Aq&ہØwYI zTjDO< ܉:k𣜩-qffGyK ;RjmnӮ2#\2(U4/h?źoG99?0 8,~VQs yBsB?4`0Іi?}laneb[PD* ZT #Rrq%R1N+29=8l b 0lF|gNN٫5yqL5ܽ2 |u1gRaWuzs÷㈧h:j҂vrM*W@'OC%޿΢,gYIjoɱpLB3yvB{~[oܵm}c 5zH3E678k%oK\2o`|VXkeyɛaA|8p >ַr7Mr\12X#zHjpQ5Vfn `GݫM4o9~OHk}y3k#b;CIѕ/FuBmUWf\WYՕļוt PmHJ;3tk/{}1{8iՖ)Dg#i@dAevo5@̷u ksZzzrMN ut[;Ͻoڪz|ދx Rɻw;M\,ayCvb"g2{? W$sywAo 0vi.WW|' #"nNr.%Ӌ1l7iQ;g]5 NH 9은Xf3k^L`!oI $9. 
釹^ ''c.4g ܷ1 Gloo̯;A{M@F-@9:e65dɳ%F3c2¾\D?{)G Y<ľ {kWRs8g= o $jiuUp*!-ǵn At0\ UUc(y&CW ODtGE Ɔ>.5aw=Yȳ\Evپ`{Rv8bSG regQ/qy[ pm#E[""/<:#幻c>TA|*2ٷ5<~,0KuJ|DN[&{{$-:<.fwzhXLFڍ6Q_9d,gkI`<>N^_"jP~!_g7e}k]bkTD]l]gp;8F.5|{5p l%"7E Ԇqrφ[b{뢄|\I'wqq_\ uM܏m~+s1[h@\6[}maGdgWmӫOWl]7O[߿i] IܿJ]7e=7SVXR W VG_NTqr#:_5\T5}sww&0Wk)O m~+dGys|1hgM`rl ǵۉ9=rG yai\A|ʱަ$c9+KoOk~N-ź:HƾY'zٰ:yOF5x(>qW ejُ}eڣ=92 GCH)FTR|ʣ@IJ(Q DI ((C"Ba"}eL i9sco{cN54W>ΓL5f\Y@"j[ZC5ɕQ^Ֆx&|{zLgƧuI|rb569XñNߛkZ:K̻sɯ:3zbv:UJ[gEt:$CmAڻnI9hbv"h^v(/;v W1Cߠ͡}пxVh 2Y0Th b/6HnE+& gw <`$YܢvZ~M7.u89B#}k|aRsp+XKWl7!aO@7`-p(ɾI Wʗ/0{hO9cݏ.0Ygý;m=Dlv DK]k߿| ޿cLgH|/ߋd |6$*0w2|J -}P{V{t;xu,oMUx/?Ӂ'3i,'$?cg8u6@x_#~Tof'kG碝+ԣ7ѯ-{>M[ف6]㬿S(-6j}==*fԜcvNԹxz_J'yh_K g=P{w3Fpl~ڧڭ`<0͟y.c1R6F?yا55dמWOk`μ>65z-?eh.ht6oeBqZ=SF \ nmmX7 U٢̺][,~Ch|7]_=G+sGul뺭}cM=A`<$oÚgꯐMV,s>i4#-=^±My{}f <3a(qbdoj5bih q)>W>`;_<@` ULz8LX#LjWGo~ j1kƘE${,QS/1QX0 .h`2z^8^=xa{qWj mXk~Ky>+޷\ 2mG}u;mrƷGY ^9-z>' Ko2y_1̣(j!k)^-5j4|ާoԃSשM?`Pxx Aˌ{Akb֜D:w>0mVGԧVo_Yg՞S?Y;t_ȇQog`O:?XȇW!+yZwdd{ 霨M s'ujmRe4ʦ6wol=y|Xؤ+d5E_kv~q>E'h-~a}Y>Uk]=D_½'Icp'D;oῇ1]*/&jpCn9rBEP 8>z,o[ Ni 5S ۬M#7j`mmM(vX_/PD6ˇK 3RMm?C<9t_R'~ '0Mw]3Z>5Nһbq|J1ydE$OߍOx9;yʸ,31.mlxj%jpϱ413,:IJFM[o=[gC[!ۢJϩS6w0D=o 0SAreFriG}"^Po \ 9掽Mf5{a|U{֚\:O{n]:CSc}0Y(Od~+?ϓǩfa.ΐ-)Nhñ={W]ŕ>cՍhwX,˫s|yPZmtyyΙ;3;'qۆN:q__vVpaxϏ+_wڎYu~g.X˒o>\Nٟ>9n51_!;RM\_vx=Z3'3ڣ/~VVv(uT\nl֩*ٶu> //.Q^%wY2G_0/'i|y?EwK"&t/Ō|}[xR9ޟS}_pY'>=UwR?X>f=l;u߮Sv )t:G9]G]W83~.T/;_Ϸag GrYBΗ,+WǺzU@_|m"S)9|:i✀VÜFpOj0 {Qн7:"MpQvȹM_3Y7jEGof >͗qfl9~|>[ i:t|;r@+\ke`$05-w7sɲ5q /3]C#Ϸ K9g9.WSsw'n׀eohg{"/ q>'IF֥GxnDo¿=_#]a:hG:.ƶʓQ-P3XC~(o5pi1#4|u!l)茏9 ZsPWgU|(WU!H}j^^.fSjYM*v[່ucz/E.HrW$\#k~sՉWd:2JU`'ga)/#|}vF;_;׎;74^ŬE #88Vϰ\d\ (s8 ~rHt5 (h~p!>A?Wӽ*PU9('B&b|娙jڛ"_ONr}6 G{A#g}cbz/oB֡S  _}c|kMܻ {#z-$.ߛim5x͌'DdEeoþ7'~/dt<7o6֫6;#ՑSο]g_v+ +z[Cݵ1Jա_ӛ\>DZ~u_᜽_~׳y2rm?kq76)UA컞7s1E:9CPw`rbFZOrT <]t(dN&t{)UB ]+mAgq#8.#Gy쌢hT4=E靌[[T{mdoARm?Woj⢽EE{W{}--  M^gv%ޫF22'?2[m|Fm|`d>_' Pr/= Xg9)y*=QbF$/zJ^@V-7'/cص ܅F^د^ѯĝƸ?p2lt7LJ,Ga[YM~hc} ]{  sS,KnaEQB]Н\ُy9:>dB^_xr): :'ԝazrD=Te\0ފg^ > ]FE[Ό\j6-{ADy6U_193OaioG'jZ5!*ݯ*{\xDd;sǢ.+ Jun2IUM+࿽jiO'|Qe4.UBr 2[IcJ8e+sz_x֗2oZ|R{>.zl| uYנj8}Goke!Phا¼e=ZxC4^{c@YS`g\yj ;Gn-bdu`avGrmƇ:ZM\گ H}F3UX}6/0Ņ^Duվ#F?;wY}XZ;ѫ?2ah)mІܼ3p2-aUr~$ݬ7m{R}l/kb$Q~zy$BRdM;6wKq{nm'yhKnaRc #btw\<ߖ[)|_vU߁ ś"^EіLv5nI|$Y7$6$Kͮ-ZmA(kzbU JnAPF As{y;y3m^>Ϝyy^ռ>l?H~"ǽL{O{_ѿkM#|ߗ#Ϙp:[{ȟ)k_W wnKsʗ'~W_7Y:{k:_yt)~:۾>J1(o+D ԂsZ1:43cҷ,}晜L#2lUgozV'i׾aԂO?ihM'){4 }ɧﶤȯᒻ 4EVO:H&F_ZIݐD~q8??x3Wǵ9!tQC:4DͰkF Qb ԢACiݧcA?it}F-4^jU/amsA^`q1K <'1Dg\S̺4Ī0h67V ;GU+JCT1Di>4Ėao1DK=Wb۰t3:4sT)pݟuP5ϺbIAJCD4_p[/Uh<43TeSU h'sE>\X,!X%fNk$hTXWXT=k9iM#(ݯ;uKJCCt5 a?n:˔X8kgT\!4_1gKUfS-ߐB1G\Kߗ\y*wtZ=KEehJCCtTH <'ۼ~sxA~=&q(blX/) mF ȯddu%evlg8^Ռwz3G̨"~ 8Yf=4SU1tR۬l\wO\##< 㐎PNkN3' Iﳠ}=fj"Yr4Bu;ޟ Y TU tG,2n+ qY!k'y9ӯȲ) oF Q(ZP@?i 7! {ވ!JugaWb°'R g1 k(ON+kj%#u gCiO~sxA~Ҝv:Fh"hɧnkQ>hT F "qVb<"g <#=Zw)e9oưF\bXWdؗnS+#z^Wrv`={̯ym)?@a=uh7SƺS/ziݩKҀ;Y~w{$u?о$kIi5^3b^\Sl~Nik}͈!z+x6ˬ( i؛F ;%w;A4DӰF nnouQA-莘TN( 1iؓF Qc,qFn7n-A;=F֌~jMh'0b8`6W`NRX~sX>!qu,bh1=n.* lKF Q;8l؇}?&߲Cr{s FgXוX0#(Ǽ~Q!7buܽENQ Z_QYO+ Q1CPgEP ?i>O\##Ve?;ӽ4(:(&Γ ֨ZZv})o;( apP~7dXGmUQ?u Oq1戲S3hvF^ŢܶK( e[F YPC{P/e]cb<82hB $Mj;8,ޯ}m'vlgqHB'q'7Rh""@DXO ! !$=9w/?ܙ{YϜi(%c^ ZAo$yY)ì(uF9 oQfq\}2Q>gu8qw8z!yqd^zCL{,2rQ*=BQWeȏ }NkO ی@YA޽CBc1GfWϬg.4FQn6|m@[/;|+{>aU`^Y-04+RC"SvN(9u7,]%Zegqt+!%yh; | {ηiZV59MG|ZRm/at)5X Ɖ;d u@u?#(;yF>(4ƘQ3|m)7>Sa Y;Xp(׫Trv埇J(7W粋Цރ2C?!أA~t |FY\.&WJ] :啝>,4FQ2|{ZVd*1vQJ4l7g;C%uXhnQ>bاA91#_Kurv~}Bc̊ru`\Y|`Ri)߂` Fyq-Ĝ.IYDxbPy7|!FǔJ-= Uv ϖLָYPWƮMrk Kê6q׈`TiW:ag>jUZkf^ìςҼsи+SS.' 
[]߱+'`K%sW׳}|oU{/7 +;kOd v˿Z$ fqiPj|` F,ؤ4)S jCU 6i<4QzJr}P+AtKj;0P44)վx {:XT~󧐟_͘QSeYegN5FCU_؟L-0'9e֋\9aM3c':Y i; A]qNֻEhFYQ7PP:ac(;k]~ͻv ٦O(~Q`#_jh;kχSZ6PY;?#J 6!hlSv=x-Y 0.&j|w`71彆7$4F([>"^`rZZ7JTj;Iwkσ~ ! >"_g>(YyoN٭4{U[PP}+;kzcNC!U{+gZsV6rFAWBar4h^.eg> ]ܟafu-shV>'U#.16pSab'y֣BccGЧ{|So,O?Xeӂ 8w8 &5~ |Pj[?k]i%*~JNaH~U3pAi8ԢGQ0~1^&e ZΚ}.bx-AW_1W|WA G0O׉9&Qn6nG$9{[[C~s/Kq sf}jn't _o5/&9?va? ?"~O6_'_~ w 6{s;܅SCj//vX5ڈ{k݊.w; rkė;{f<7[{:fe;+u~$v>v}=v> }uv_Gq]{ķGyMù. y8/grǰ^^w!npV`lԼ9Gg`7oMxfc)n7bb{j@7>ow~?a=s2s߼u^nhqnF7p;n)>Ŵ)bgh_&{~u.O&GO%)^Vă8N75Ic|p[ۭc^f-Mռ_Ĭ~_$wK]~.u\z/s49Kn2R8C(A{=Ie|>MenIQ?xrt.rtVr4QB.G{9=X+cR_>7wsP>di?- Þhx(Mq|K8(&QJ6NBi B  $$*<"q*EY{왵mb L ^o3gӭ-0>iqx&o;h`J_gkզ[ךMEus $%0>080_=Uug6~( u1,f-Qɍr=(smbGzUuqs\/^6V.i^սs}ڪVWTZ9h0%u@)6~5̅~nH{][j\ݟ]c>rK}CjDN쐬K⏊:9}gDư|e],uYg욖ԜOI͵c yVv՜.uޣg$cUӎT|oUڮUI)mD FcOvJ띲v*]9eVjHggP`Vp4kykk}>[|UjαHs1Mo(L-0uS:~b]k|\YN7;0'0$H3WpEyLی9άߖcZ&fc!}͘yF 7.KM{@|9K2'v`|/ixZ}Zg7>Kx,[[>,s ɸ U$fZx W[0ke#c<27u~SF7^~Y|~ |\n_P̱Y{EYWY3Mց y(1!!<$>9>/qGum>oxI^|՟9FuU5{4[m{?)f?sV˜if.aM){~~ˠ6z2 Pzhg/ƌVvy|vLJc}2?({LIy_gݦQ }8>}uC`]X.&eN-cg!41sP]Ok"qq}(us]TC_1z`=Q 1)fRw00'sΧsg; v7}|v3|ز۳֏ޠ}vuQ9sk$K|PTky]/ɯ53)FJ :m,4E[/A4KeZ]筗pm]ZN|IX<'=?mY0}k$YS0W5qGMz\8Xk;륬_ _gMMbC?X< f?}e ק:Iz[Q` 5J_Kk ؑ3(|^; vNWc3Ц0~J֪u,wzaI/= m\_ijV9#‚n {gyx6Ƿ?8_j}=&f(DQUTϚ!\KЍ5U]׿2XyQj0iWl߇"X.E| Ƙ~Q(^ ]Z7G0"sgkح߇X_Gu͒Q+E;⋢s|xf9 >TP"STl.(g*\ zi.6^K|vS9N7j8zKye3u> 'b9ڲY|9kGK_{~ji0K; s+1g|3kzk] .ɋIxq knJMFOSװ6u.Z>5ăukP5)?wLbGIKG߇Ucb A]ӞQ|oI9 Qx bxPuYZ7xQb#s} YS_ [-}(usEk4>Ga-5U ׭~4t) PH0}`]=DZ;l#Ѧ-a | `]`oq߇ӫ:K--qA,ueՖsNӍ9],=cow|e|/%>{.5{/ύV'=ѧLͬpzw%u}K/zivǨ=* gCO܊V|9>q,O%S@J_`]pۦ߇cS`]䭗5CuQx}huMmM;[/Mz[/n>(J~9{ wllYZɧ)_ON]Ϛx)E<>ܙs| s^:чM.i=KRS(}}"o4Ea4E_Rl߇"XΧJ ƨSX(E/:ưװ[.?h׳}daoLD׃I`0 ~E;}-R{d_yg5Nz Dwׇ$YPkUQQ߇2Xy%3Î[z^Lz?[7<,h_*{iV~{80(w|g /\JE1}s9 ySܘD/-R8/QPG[/|4e#PfLijts%y%~f"~ƙh4Kz s={O]#'SV{Ӱ7^ڸrWbT(5uLmm>l?kx∊@+{4x}t~;MI4S8W(}}UQ:D0Ӌmw۾nvo2Kfm2$(AB@H<//-|p'3nݪ:usK VɗاAq֬3ӿOqqEM}RV˗8@i5g,75/q Wɗ؟5[z<۔a7'e\1pxoش~иfg_{uMś+$ƪ#tdeJ$eU|)Fnd̮Ͽ _f󥆉|Fׁ/3t?#;m7VڳlR}l9$;%13}\:xFvF{&a=˶or3Ƶ? f mZ?YK/Th~nR2(3C0 -i9H'<TBGl۠zW3{ b4ݕe; צALxׇXM;j?hzBsg7 3? {^4Ag_d |B? SX-4+rmr$$L:Կ}| / ~tM?hqu rhxl lo1,F}EiZi*:,(^qGKƿb4F~Hv Lirh2 oNAUG}L?hЀSB4=˶ڳlMz# 1.NrE6^#5PMUi! jA#Bտi 5]5Y1"M/?=1ςip5? 6!5]z̴~@i0hNsfeH1`giqM&f|j;0 )~IqwQm=jmMsM1>y\l?%~qF|㐦/qӎ/~kAƴ~8a>lڴ~QpP4O\~A|&Ji>u3A#=^96䌬 e/-yjZo¸} Ҁrw']xܱ#L4`]갑30kKSF{ ` _2Lcؽ>d8`877܇9̑b=CvYXߟрı>~O~} {41g>a1¿0Ѐ3vf,gqm \";n18/ <ҼcIyDr1_Q%iy#n0vϱy)gs}C,r?xo`{%"a)kQx[^?BĞe9_2x=G}|`OK~mݢ~@;b_ 4t*)Gqv^̑s=YNs%} u]6֜% tÑ}r2Q;)G}5!X |G3&=nkHRsr/q.t1_rxFI5ۤ9n` R.IzHmw픔9[Ԥih/FԳ4ֆH NHkfm.^R]HyN"X/;WQH~g9 $Wm@OZRrj(iފGrj4/0{K^C)"G۰CS%rcJrqvH9uҚp4ɽRAk [խR#|8]'}/H.!zKK8Jc2bR:}*y9|tT"}S>|cYw> =K!D途OŽda>Cێ>'ͿҜq%H;N3r֥+{\>xJZ0 /9C=|kF zyo3ĵ@z#{,'cn'_;Kp2|z?&{؃u'2t!rsw>{o "%ot򩎣SvGr꒴>y$X0z?˥pv]wQlK7%RHA9R;ulS;h,jx@jѢhW CtQ ;k/?3瞙;s&r9ϧJz/wXŦ}^sas> kgu9>U殹x3p,u=36[v=U|̉5v*z}{I{֣9bBdzP`o9k=T֡1`w/`Sku~wZ#gu ]u9@fGC 28;xݻp+|Ǝ?TΈWʞ?$[{ʜڙ?:l=8!>{Jm5–{d5w90O'HYiSYwzjXk߮#,o={ =_p>א#psz羀cxu<=al+lyeO )~٣®'Ɯ5~|C|?jGgSkQkr3g!ϽS7xRף&W*~OT2?O ̞sHٻ83RsgW۾#pZֻ{S:hMw[hC2jIθ!-tMg 921wo\wԹ[~ ?{)8w8>>vŹ1۬_+|(lcVgwOy쭀Cp<[vyyc.¦}XyfTj+b>~SjfobqVj\*j{7Zf넹Uĭ9Z~n%Fm^uxbǚg!g Y o?ȷ&˞5sNF_cOLq~#ঁz&Y2Oׅ 6.Z᫪}mbd]_>P7wo뜹p|o5Eo}6},zR{T́rn WZP`Isau'_Ey/Ϩ3hm9T֟ۜg_YWO\ٟ*穦5MLb2>Sjݰ{ӎQj-kX]3=3m%x+OGJyux#w8O:>}{'|rpYH̖YZ/`u_Qky\w]9`wxwF<ķBYKt8m>λRՔ=sA>iEy+Exʻ;vmlX[.Z_o}OwsSlϔ.(ψ2Yo/ێcԯpO_ ]*ٯ-\Tcm)c'p| Y{*xH.OdzB38[?4׍3Mؿ# mi-Քڵme æ~us_x:F =S8ytr+xnq-}'e+ܕ>D nbsl-)N ມ 㯕5[ӹw_+uX35Y1?ǜw,e[xwt޿' 8egط0nVcn|dzgc*P;:;O4UYsϟ~pt[(ſy~a_R'oCǚa/3i#um]m+{]~E}3絾r %{Gʚ~#OzT=?}pckx7ʹş}Xsࡍ15o{F!pji!h{sXq% ,b16kR8|s֯5<_ؽ9\>2ϬR? 
x+{53qSg,kϻf</ K KurO9.wF9[~X89k09[-xrvaj-,+Ҽ2|7;{KYk{ʼF:"^϶R^N4w2;?U괾R{Z\.zs~7J oG[s, Vamw__|)a5;̋;֍>f=U(kŖRG^lO㜍uqEk`yomk,mw[.sϨ|5녯n[-x񻦼![fYBg){)Oa$p>[6ߗjSo=e}ֺUe.߹ ?֣L2ۀ=5Qs[|6=OxVzq.\BH;jI&2'əsrr?IN2dLitڱ"^Z(UETRTJA](.\ Ew^QAy;yUb-!ϹBv}s^ſaKd/#{6gg;)y)v}=MFx8oմȚX<Ñö?\{6Q|,Fְ^2MKe[#%2gyϐ~ ψKzn^}ns}cH^XCz39/;~6ٳyvZh8w& b6tӑTA[s,Dc}ճ|Zqx}1r_7|Xdo\w-i|&2Ft8·f빴Ldfۑܩ; xMWxG{Bй/zQ$W>Z~Z+wyNo# l.2>EKzFÑ=sj'9|jaӅDo/Pލ >Ym ޞ[Ž1nTng G!ZÑ|ro"KC*ݿٓnG)ml鞴WŹ?ٓ<;eyhG=:4C*vC:o`3=d :[?Vۈ笿ϕ+&[_*߫z~pIa]Bu;MFhyVg t;}s kȢG# | >ਮytp}Q졞s<ϻ4 ;fM霞e ;:7w99? E{8?<{fSsg {jD֒tLx/},rmGr48mxFnc{;^䜫Y@68'"r,I.|7x {bEsT^gӖ g4C|d;{=sɗY~ta}=Oqb 4tlG֕9<- ~*ٿ9Ϸ6gp[;fz_]Cq#V}ѼQtyݽӭq >[|*zI3|f G꭪} kָ{e2uo ^^v#kkk^7edWB'*{?_LqȺʖ*FkՌ]/+{mGjb[#yVnřtkT̩)o ~d˽gpJ*7jOU=}ZXU:\11oLq=`X+x6yn0ϙg &" T(_Ʈ9Vƣt;s%ĖU Z?*x_W𕊦zo*F??UO x;+^~W} ~W[oV XA59wA RUu=*j褐}}(:Tk/fonŠj,~{K]4Ģb槻Cg#s%WMfq|']13_^L9~c ,~8v, CZb |)/+ΉoT;ί'x>ld}LjOw=O)G45xj6X[ulxIgSbKd%rSmS횄g"1yS}]a,r>: q^U6\y!_/FUNC\I }{ܻ{h}E_}9+uzx[l |<;o)<9Cx=sг̳ZBa ԽBvx ;cl}Ql%b:.e.zNsy)rDV=U@~—Y$E2>=G duoCYx̻Y6I|1RZuCpm4o[o,DzYGs=f"swϺǻ8뜻~<_[7Ͻdy"g1@'ލ\hg3LSIrt#hs~!G^ýz#5q3 k3mayj Pe 7#G*gF -:@g =EXX[CpZ; #OlW5ͽ7xFy_l^cl x/3Y< 3M ,@hՅ*.R䲑"}WOE*#J/[΋G1W:|ߜw_r%=#F)o<)؀8xULz"cN]s$koW0FpNlaOcmӜ){r<3b8Yblyg bx4-V磞}Xؓv1Q-Ϋ!K{s^,w^XOc/IԎX2bXe2+M‡gŰl$!Ctp ?W!ڈ>ݬXCwm:1ֻ=~V^X39؀fN,۱ϳMvk:0+1ܙhrWɤX>{Y7馶~ߏ{ r7=W:Zw?ߎ‡Ӡz, bqZU[a&1|yb} -7WAXRYo$R܂3bؔsRYX[kr Ɖ -20c*ι.'uݲ+Vc8} C~FyE4 %R;+͂&y|ߝrC8^q*Wgp%yњnH)y6ƙX,!jK5{!UQg=I>9fb,Źc1~7%'7/ iO<۟\\ějt! } փAo|$V;'1L"/M: * kO甗 ނlХOM+b9) }|^̳xg+ЩlC ]B {q"`x *qMiWn*9ȩ fqxi p#e{4W2y6<4#Ck48_pkvZOam~]cK'󞸉,6Cik2[a :u 3%<\Y؞u,6KjXit(YְX}8kL0oRkxrŻT> bb[곎;kM ]''R>_V o\,o|>.ӲKeLc7wQu+ř9Ȱoq< b8=Cy1_lVe=P{£sum}Y&쥶}2d[I{tT/_AֈXd};[f5.þ;E>b9sJ@E7Iv$]kaoW`_ggAGke<1,W^M2qڟۚ{*oSbqI?-bvRpa/jI f@/81 h( о$Vg'%v5qN8=b='b4 y4pYgŰ7 s{zF|vI-b}<>"_`m/A"h)Q2Up Aݑv$K&!WLD~Z\{XCK5 ~nDWg:f&_v65vninn<ޮ$IE~Qq=AR;~<>dɆTWgljhf" &^J^.\(M;Kb5+}[ywݏj[fqMk;]W#Ѧ#PLteE[}C}CGZ)/rD,7wv=8omnv5WD]ޝ,6E5CԭchF=dsoTw?U3 uE Nl!(6>LcSث3 @a1a`0̭ƎS['u:Z7T.xJN.lF]Tڪ못Uʪ /!q]7#={sm醰)h,ΌMz_}O4^^;vFevenk(om˷&[[sW|"^_llP^ DsO\.%G†gr]ZUiqġGDlWH7DήTGmֽ酅_[v2T޳=rZkK䥽R6jkKߚY}us֞ٺZTmdfprlby)J(+Qנ+D{[c \*<6ft< jX kt3Lwܿ?p/]z_aމO[8>wߢKa`ahNS axjm0[\[Lloqf:T&J~o-KчKﶌN/zf{7ږۙ}ӻao}9'.(9A.De |/?VTi/]zRlkX{v7Nݾv6wmsijobaiT,._(Uݺdzg;ޥS]2/Rۮq yz¦8lhɇ|t-۵0:qpҙs-o%T6ӖJ^>}퓙| 3_ q=_ &r:J3a1w3ѩS'qrM>_aZQ2O&ξ8=trpWw'^;8)Lc}kՙ_Y+kSXih[NLFJgN+Y9{') wYIdd7JZ餄 g'}POLZto=]; s/m,MM.ΌJWΗ?ZW'JFO~?ڻqF&o/Z \ϽH{3x|Db?+AИ0 AAF AtaW HOu'A!_o>o6## endstream endobj 70 0 obj << /Type /Font /Subtype /CIDFontType2 /BaseFont /IEBFOI+Batang /FontDescriptor 68 0 R /CIDSystemInfo << /Registry (Adobe)/Ordering (Identity)/Supplement 0 >> /DW 1000 /W [ 71 [ 333 ] 85 [ 291 ] 87 96 595 ] >> endobj 71 0 obj << /Filter /FlateDecode /Length 241 >> stream HTPR -u, xMjǘhυMdCHH 9sZ:0n ႣPqF_MMn#N4 8ǰMw[S?R[ mBƁrBEx$#}8{0H;"4ELZN [r9IڝUivH쭞fCG%t_q\7z_Gz endstream endobj 72 0 obj << /S /D >> endobj 73 0 obj << /Nums [ 0 72 0 R ] >> endobj 74 0 obj << /Type /Pages /Kids [ 82 0 R 1 0 R 4 0 R 7 0 R 10 0 R 13 0 R 16 0 R 19 0 R 22 0 R 25 0 R ] /Count 10 /Parent 75 0 R >> endobj 75 0 obj << /Type /Pages /Kids [ 74 0 R 76 0 R 77 0 R ] /Count 22 >> endobj 76 0 obj << /Type /Pages /Kids [ 28 0 R 31 0 R 34 0 R 37 0 R 40 0 R 43 0 R 46 0 R 49 0 R 52 0 R 55 0 R ] /Count 10 /Parent 75 0 R >> endobj 77 0 obj << /Type /Pages /Kids [ 58 0 R 61 0 R ] /Count 2 /Parent 75 0 R >> endobj 78 0 obj << /CreationDate (D:20100915102345-04'00') /ModDate (D:20100915102345-04'00') /Producer (Acrobat Distiller 5.0.5 \(Windows\)) /Author (ukang) /Creator (PScript5.dll Version 5.2.2) /Title 
PEGASUS/README0000644000000000000000000000410611443145611011544 0ustar rootroot
PEGASUS: Peta-Scale Graph Mining System
Authors: U Kang, Duen Horng Chau, and Christos Faloutsos

This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

Version: 2.0
Date: Sep 6, 2010
Main Contact: U Kang(ukang@cs.cmu.edu)

PEGASUS is a Peta-scale graph mining system on hadoop, fully written in Java. It calculates the degree distribution, PageRank, RWR(Random Walk with Restart) scores, radii/diameter, and connected components of very large graphs with more than billions of nodes and edges.

The details of PEGASUS can be found in the following paper: U Kang, Charalampos E. Tsourakakis, and Christos Faloutsos. PEGASUS: A Peta-Scale Graph Mining System - Implementation and Observations. IEEE International Conference On Data Mining 2009, Miami, Florida, USA.
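A quick illustrative example (a sketch for orientation, not taken from the user's guide; the file name my_graph.edge and the HDFS directory dd_edge are hypothetical): PEGASUS reads plain-text edge files with one edge per line, the source and destination node ids separated by a tab (tab-separated input is assumed here). To compute the in-degree distribution with 16 reducers:

hadoop dfs -mkdir dd_edge
hadoop dfs -put my_graph.edge dd_edge/
./run_dd.sh in 16 dd_edge

The script prints its exact usage when invoked with the wrong number of arguments; the per-degree counts are left in the HDFS directory dd_deg_count.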
If your work uses or refers to PEGASUS, please cite the papers using the following bibtex entries: @article{PegasusICDM2009, title = {PEGASUS: A Peta-Scale Graph Mining System - Implementation and Observations}, author = { Kang, U and Tsourakakis, C.E and Faloutsos, C.}, year = {2009}, journal = {IEEE International Conference On Data Mining}, } @article{PegasusKAIS, title = {PEGASUS: Mining Peta-Scale Graphs}, author = { Kang, U and Tsourakakis,C.E and Faloutsos,C.}, year = {2010}, journal = {Knowledge and Information Systems}, } If you use PEGASUS for research or commercial purposes, please let us know your institution(company) and whether it's ok to mention it among the users of PEGASUS. For questions on PEGASUS, please contact . For installation, running, and rebuilding PEGASUS, see the PEGASUS user's guide PDF file. PEGASUS/run_ccmptblk.sh0000755000000000000000000000274611443145611013716 0ustar rootroot# Program : run_ccmptblk.sh # Description : Run HCC-BLOCK, a block version of HCC which hadoop > /dev/null status=$? if test $status -ne 0 ; then echo "" echo "Hadoop is not installed in the system." echo "Please install Hadoop and make sure the hadoop binary is accessible." exit 127 fi if [ $# -ne 4 ]; then echo 1>&2 Usage: $0 [#_of_nodes] [#_of_reducers] [HDFS edge_file_path] [block_width] echo 1>&2 [#_of_nodes] : number of nodes in the graph echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop echo 1>&2 [HDFS edge_file_path] : HDFS directory where edge file is located echo 1>&2 [block_width] : block width. usually set to 16. echo 1>&2 ex: $0 6 3 cc_edge 16 exit 127 fi #### Step 1. Generate Init Vector hadoop dfs -rmr cc_initvector hadoop jar pegasus-2.0.jar pegasus.ConCmptIVGen cc_initvector $1 $2 #### Step 2. Run mv_prep hadoop dfs -rmr cc_iv_block hadoop dfs -rmr cc_edge_block ./run_mvprep.sh cc_initvector cc_iv_block $1 $4 $2 msc makesym hadoop dfs -rmr cc_initvector ./run_mvprep.sh cc_edge cc_edge_block $1 $4 $2 null makesym #### Step 3. Run pegasus.ConCmptBlock rm -rf concmpt_output_temp hadoop dfs -rmr concmpt_curbm hadoop dfs -rmr concmpt_tempbm hadoop dfs -rmr concmpt_nextbm hadoop dfs -rmr concmpt_output hadoop dfs -rmr concmpt_summaryout hadoop dfs -rmr concmpt_curbm_unfold hadoop jar pegasus-2.0.jar pegasus.ConCmptBlock cc_edge_block cc_iv_block concmpt_tempbm concmpt_nextbm concmpt_output $1 $2 fast $4 rm -rf concmpt_output_temp PEGASUS/run_ccmpt.sh0000755000000000000000000000173411443145611013221 0ustar rootroot# Program : run_ccmpt.sh # Description : Run HCC, a connected component algorithm on hadoop. which hadoop > /dev/null status=$? if test $status -ne 0 ; then echo "" echo "Hadoop is not installed in the system." echo "Please install Hadoop and make sure the hadoop binary is accessible." 
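# (Annotation, not part of the original script.) HCC finds the connected
# components by repeatedly propagating the minimum node id within each
# component along the edges until no id changes. The pegasus.ConCmpt
# invocation at the bottom of this script takes: the HDFS edge path ($3),
# three working directories (concmpt_curbm/tempbm/nextbm), the output
# directory, the number of nodes ($1), the number of reducers ($2),
# "new" (start from scratch, as opposed to contNN which resumes from
# iteration NN), and "makesym" (duplicate reverse edges).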
exit 127 fi if [ $# -ne 3 ]; then echo 1>&2 Usage: $0 [#_of_nodes] [#_of_reducers] [HDFS edge_file_path] echo 1>&2 [#_of_nodes] : number of nodes in the graph echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop echo 1>&2 [HDFS edge_file_path] : HDFS directory where edge file is located echo 1>&2 ex: $0 6 3 cc_edge exit 127 fi rm -rf concmpt_output_temp hadoop dfs -rmr concmpt_curbm hadoop dfs -rmr concmpt_tempbm hadoop dfs -rmr concmpt_nextbm hadoop dfs -rmr concmpt_output hadoop dfs -rmr concmpt_summaryout hadoop jar pegasus-2.0.jar pegasus.ConCmpt $3 concmpt_curbm concmpt_tempbm concmpt_nextbm concmpt_output $1 $2 new makesym rm -rf concmpt_output_temp PEGASUS/run_dd.sh0000755000000000000000000000114011443145611012471 0ustar rootroot# Program : run_dd.sh # Description : Run DegDist, a degree distribution computation on hadoop. if [ $# -ne 3 ]; then echo 1>&2 Usage: $0 [in or out or inout] [#_of_reducer] [HDFS edge_file_path] echo 1>&2 [in or out or inout] : type of degree\(in, out, inout\) to compute echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop echo 1>&2 [HDFS edge_file_path] : HDFS directory where edge file is located echo 1>&2 ex: $0 in 16 dd_edge exit 127 fi hadoop dfs -rmr dd_node_deg hadoop dfs -rmr dd_deg_count hadoop jar pegasus-2.0.jar pegasus.DegDist $3 dd_node_deg dd_deg_count $1 $2 PEGASUS/run_hadiblk.sh0000755000000000000000000000353311443145611013510 0ustar rootroot# Program : run_hadiblk.sh # Description : Run HADI-BLOCK, a block version of HADI which hadoop > /dev/null status=$? if test $status -ne 0 ; then echo "" echo "Hadoop is not installed in the system." echo "Please install Hadoop and make sure the hadoop binary is accessible." exit 127 fi if [ $# -ne 6 ]; then echo 1>&2 Usage: $0 [#_of_nodes] [#_of_reducers] [HDFS edge_file_path] [makesym or nosym] [block_width] [enc or noenc] echo 1>&2 [#_of_nodes] : number of nodes in the graph echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop echo 1>&2 [HDFS edge_file_path] : HDFS directory where edge file is located echo 1>&2 [makesym or nosym] : makesym-make undirected graph, nosym-use directed graph echo 1>&2 [block_width] : block width. usually set to 16. echo 1>&2 [enc or noenc] : use bit shuffle encoding or not. echo 1>&2 ex: $0 6 3 hadi_edge makesym 16 noenc exit 127 fi #### Step 1. Generate Init Vector hadoop dfs -rmr hadi_initvector hadoop jar pegasus-2.0.jar pegasus.HadiIVGen hadi_initvector $1 $2 32 $6 #### Step 2. Run mv_prep hadoop dfs -rmr hadi_iv_block hadoop dfs -rmr hadi_edge_block ./run_mvprep.sh hadi_initvector hadi_iv_block $1 $5 $2 s $4 hadoop dfs -rmr hadi_initvector ./run_mvprep.sh $3 hadi_edge_block $1 $5 $2 null $4 #### Step 3. Run pegasus.HadiBlock hadoop dfs -rmr hadi_tempbm_block hadoop dfs -rmr hadi_nextbm_block hadoop dfs -rmr hadi_output_block radius_path=hadi_radius_block radius_summary_path=hadi_radius_block_summary hadoop dfs -rmr $radius_path hadoop dfs -rmr $radius_summary_path hadoop jar pegasus-2.0.jar pegasus.HadiBlock hadi_edge_block hadi_iv_block hadi_tempbm_block hadi_nextbm_block hadi_output_block $1 32 $2 $6 newbm $5 max local_output_path=hadi_output_block$1_tempblk rm -rf $local_output_path echo "Radius Summary:" echo "Rad r Count(r)" hadoop dfs -cat $radius_summary_path/* PEGASUS/run_hadi.sh0000755000000000000000000000257011443145611013017 0ustar rootroot# Program : run_hadi.sh # Description : Run HADI, a diameter/radii estimation algorithm on hadoop. which hadoop > /dev/null status=$? 
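# (Annotation, not part of the original script.) HADI estimates the radius
# of each node and the diameter of the graph using probabilistic bitstring
# sketches of node neighborhoods; the literal 32 passed to pegasus.Hadi
# below is the number of bitstrings kept per node, and "enc" selects the
# bit-shuffle encoding of those bitstrings, per the usage text above.
# This sketch-based reading follows the HADI paper and is given here as
# background, not taken from this file.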
if test $status -ne 0 ; then echo "" echo "Hadoop is not installed in the system." echo "Please install Hadoop and make sure the hadoop binary is accessible." exit 127 fi if [ $# -ne 5 ]; then echo 1>&2 Usage: $0 [#_of_nodes] [#_of_reducers] [HDFS edge_file_path] [makesym or nosym] [enc or noenc] echo 1>&2 [#_of_nodes] : number of nodes in the graph echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop echo 1>&2 [HDFS edge_file_path] : HDFS directory where edge file is located echo 1>&2 [makesym or nosym] : makesym-duplicate reverse edges, nosym-use original edge file echo 1>&2 [enc or noenc] : use bit-shuffle encoding or not echo 1>&2 ex: $0 6 3 hadi_edge makesym noenc exit 127 fi rm -rf hadi_output_temp* hadoop dfs -rmr hadi_curbm hadoop dfs -rmr hadi_tempbm hadoop dfs -rmr hadi_nextbm hadoop dfs -rmr hadi_output radius_path=hadi_radius radius_summary_path=hadi_radius_summary hadoop dfs -rmr $radius_path hadoop dfs -rmr $radius_summary_path hadoop jar pegasus-2.0.jar pegasus.Hadi $3 hadi_curbm hadi_tempbm hadi_nextbm hadi_output $1 32 $2 $5 newbm $4 max local_output_path=hadi_output$1_temp rm -rf $local_output_path echo "Radius Summary:" echo "Rad r Count(r)" hadoop dfs -cat $radius_summary_path/* PEGASUS/run_jointable_pegasus.sh0000755000000000000000000000111211443145611015577 0ustar rootroot# Program : run_jointable.sh # Description : join tables. similar to the `join' command in UNIX. which hadoop > /dev/null status=$? if test $status -ne 0 ; then echo "" echo "Hadoop is not installed in the system." echo "Please install Hadoop and make sure the hadoop binary is accessible." exit 127 fi if [ $# -lt 5 ]; then echo 1>&2 Usage: $0 [#_of_reducers] [OuterJoin or SemiJoin] [HDFS output path] [HDFS input path 1] [input path 2...] echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop exit 127 fi hadoop jar pegasus-2.0.jar pegasus.JoinTablePegasus $* PEGASUS/run_mvprep.sh0000755000000000000000000000100711443145611013415 0ustar rootroot# Program : run_mvprep.sh # Description : Run MatvecPrep which preprocess normal edge files or vector files to block forms. if [ $# -ne 7 ]; then echo 1>&2 Usage: $0 [input HDFS path] [output HDFS path] [#_of_rows] [block size] [#_of_reducers] [out_prefix or null] [makesym or nosym] echo 1>&2 Ex1: $0 ya_edge ya_blockedge 1413511390 32 100 null makesym echo 1>&2 Ex2: $0 mv_edge mv_outedge 5 2 3 msc nosym exit 127 fi rm -rf $2 hadoop dfs -rmr $2 hadoop jar pegasus-2.0.jar pegasus.matvec.MatvecPrep $* PEGASUS/run_prblk.sh0000755000000000000000000000302611443145611013221 0ustar rootroot# Program : run_prblk.sh # Description : Run PageRank-BLOCK, a block version of PageRank calculation on hadoop. which hadoop > /dev/null status=$? if test $status -ne 0 ; then echo "" echo "Hadoop is not installed in the system." echo "Please install Hadoop and make sure the hadoop binary is accessible." exit 127 fi if [ $# -ne 5 ]; then echo 1>&2 Usage: $0 [#_of_nodes] [#_of_reducers] [HDFS edge path] [makesym or nosym] [block width] echo 1>&2 [#_of_nodes] : number of nodes in the graph echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop echo 1>&2 [HDFS edge_file_path] : HDFS directory where edge file is located echo 1>&2 [makesym or nosym] : makesym for directed graph, nosym for undirected graph echo 1>&2 [block_width] : block width. usually set to 16. echo 1>&2 Ex: $0 16 3 pr_edge makesym 2 exit 127 fi #### Step 1. 
Generate Init Vector hadoop dfs -rmr pr_input hadoop dfs -rmr pr_initvector hadoop jar pegasus-2.0.jar pegasus.PagerankInitVector pr_initvector $1 $2 hadoop dfs -rmr pr_input #### Step 2. Run mv_prep hadoop dfs -rmr pr_iv_block ./run_mvprep.sh pr_initvector pr_iv_block $1 $5 $2 s $4 hadoop dfs -rmr pr_initvector ./run_prprep.sh $3 pr_edge_colnorm $2 $4 hadoop dfs -rmr pr_edge_block ./run_mvprep.sh pr_edge_colnorm pr_edge_block $1 $5 $2 null nosym hadoop dfs -rmr pr_edge_colnorm #### Step 3. Run pegasus.PagerankBlock echo "Now running pegasus.PagerankBlock..." hadoop jar pegasus-2.0.jar pegasus.PagerankBlock pr_edge_block pr_iv_block pr_tempmv_block pr_output_block $1 $2 1024 $5 PEGASUS/run_prprep.sh0000755000000000000000000000153111443145611013416 0ustar rootroot# Program : run_prprep.sh # Description : Run PagerankPrep which converts the input edge file to the column-normalized adjacency matrix. which hadoop > /dev/null status=$? if test $status -ne 0 ; then echo "" echo "Hadoop is not installed in the system." echo "Please install Hadoop and make sure the hadoop binary is accessible." exit 127 fi if [ $# -ne 4 ]; then echo 1>&2 Usage: $0 [input HDFS path] [output HDFS path] [#_of_reducers] [makesym or nosym] echo 1>&2 [input HDFS path] : HDFS directory where edge file is located echo 1>&2 [output HDFS path] : HDFS directory where the result is to be saved echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop echo 1>&2 [makesym or nosym] : makesym-duplicate reverse edges, nosym-use exit 127 fi hadoop dfs -rmr $2 hadoop jar pegasus-2.0.jar pegasus.PagerankPrep $1 $2 $3 $4 PEGASUS/run_pr.sh0000755000000000000000000000172111443145611012530 0ustar rootroot# Program : run_pr.sh # Description : Run PageRank-plain, a PageRank calculation algorithm on hadoop. which hadoop > /dev/null status=$? if test $status -ne 0 ; then echo "" echo "Hadoop is not installed in the system." echo "Please install Hadoop and make sure the hadoop binary is accessible." exit 127 fi if [ $# -ne 4 ]; then echo 1>&2 Usage: $0 [#_of_nodes] [#_of_reducers] [HDFS edge_file_path] [makesym or nosym] echo 1>&2 [#_of_nodes] : number of nodes in the graph echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop. echo 1>&2 [HDFS edge_file_path] : HDFS directory where edge file is located echo 1>&2 [makesym or nosym] : makesym-duplicate reverse edges, nosym-use original edge file echo 1>&2 ex: $0 16 5 pr_edge makesym exit 127 fi hadoop dfs -rmr pr_tempmv hadoop dfs -rmr pr_output hadoop dfs -rmr pr_minmax hadoop dfs -rmr pr_distr hadoop jar pegasus-2.0.jar pegasus.PagerankNaive $3 pr_tempmv pr_output $1 $2 1024 $4 new PEGASUS/run_rwrblk.sh0000755000000000000000000000316211443145611013413 0ustar rootroot# Program : run_rwrblk.sh # Description : Run RWR-BLOCK, a block version of RWR calculation on hadoop. which hadoop > /dev/null status=$? if test $status -ne 0 ; then echo "" echo "Hadoop is not installed in the system." echo "Please install Hadoop and make sure the hadoop binary is accessible." 
exit 127
fi

if [ $# -ne 7 ]; then
 echo 1>&2 Usage: $0 [HDFS edge path] [query path] [#_of_nodes] [#_of_reducers] [makesym or nosym] [block width] [c]
 echo 1>&2 [HDFS edge_file_path] : HDFS directory where edge file is located
 echo 1>&2 [query path] : HDFS directory containing query nodes
 echo 1>&2 [#_of_nodes] : number of nodes in the graph
 echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop
 echo 1>&2 [makesym or nosym] : makesym for directed graph, nosym for undirected graph
 echo 1>&2 [block_width] : block width. usually set to 16.
 echo 1>&2 [c] : mixing constant. Default value is 0.85.
 echo 1>&2 ex: $0 rwr_edge rwr_query 16 3 nosym 8 0.85
 exit 127
fi

#### Step 1. Generate Init Vector
hadoop dfs -rmr pr_input
hadoop dfs -rmr rwr_initvector
hadoop jar pegasus-2.0.jar pegasus.PagerankInitVector rwr_initvector $3 $4
hadoop dfs -rmr pr_input

#### Step 2. Run mv_prep
hadoop dfs -rmr rwr_iv_block
./run_mvprep.sh rwr_initvector rwr_iv_block $3 $6 $4 s $5
hadoop dfs -rmr rwr_initvector
./run_prprep.sh $1 rwr_edge_colnorm $4 $5
hadoop dfs -rmr rwr_edge_block
./run_mvprep.sh rwr_edge_colnorm rwr_edge_block $3 $6 $4 null nosym
hadoop dfs -rmr rwr_edge_colnorm

#### Step 3. Run RWR
echo "Now running RWR..."
hadoop jar pegasus-2.0.jar pegasus.RWRBlock rwr_edge_block rwr_iv_block $2 $3 $4 1024 $6 $7

PEGASUS/run_rwr.sh0000755000000000000000000000211711443145611012721 0ustar rootroot
# Program : run_rwr.sh
# Description : Run RWR-plain, a RWR calculation algorithm on hadoop.

which hadoop > /dev/null
status=$?
if test $status -ne 0 ; then
 echo ""
 echo "Hadoop is not installed in the system."
 echo "Please install Hadoop and make sure the hadoop binary is accessible."
 exit 127
fi

if [ $# -ne 7 ]; then
 echo 1>&2 Usage: $0 [HDFS edge_file_path] [query path] [#_of_nodes] [#_of_reducers] [makesym or nosym] [new or contNN] [c]
 echo 1>&2 [HDFS edge_file_path] : HDFS directory where edge file is located
 echo 1>&2 [query path] : HDFS directory containing query nodes
 echo 1>&2 [#_of_nodes] : number of nodes in the graph
 echo 1>&2 [#_of_reducers] : number of reducers to use in hadoop.
 echo 1>&2 [makesym or nosym] : makesym-duplicate reverse edges, nosym-use original edge file
 echo 1>&2 [new or contNN] : starts from scratch, or continue from the iteration NN
 echo 1>&2 [c] : mixing constant. Default value is 0.85.
 echo 1>&2 ex: $0 rwr_edge rwr_query 16 3 nosym new 0.85
 exit 127
fi

hadoop jar pegasus-2.0.jar pegasus.RWRNaive $1 $2 $3 $4 1024 $5 $6 $7

PEGASUS/src/0000755000000000000000000000000011443145611011452 5ustar rootroot
PEGASUS/src/pegasus/0000755000000000000000000000000011443145611013121 5ustar rootroot
PEGASUS/src/pegasus/column_joiner/0000755000000000000000000000000011443145611015764 5ustar rootroot
PEGASUS/src/pegasus/column_joiner/JoinTablePegasus.java0000644000000000000000000001540711443145611022035 0ustar rootroot
/***********************************************************************
 PEGASUS: Peta-Scale Graph Mining System
 Authors: U Kang, Duen Horng Chau, and Christos Faloutsos

 This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: JoinTablePegasus.java - Join Columns Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class JoinTablePegasus extends Configured implements Tool { static int OuterJoin = 1, SemiJoin = 2; public static class MapPass1 extends MapReduceBase implements Mapper { int number_tables = 0; int column_index = 0; public void configure(JobConf job) { number_tables = Integer.parseInt(job.get("number_tables")); System.out.println("MapPass1 : configure is called. number_tables=" + number_tables ); String input_file = job.get("map.input.file"); System.out.println("input_file=" + input_file); String path_name =""; for(int i = 1; i <= number_tables; i++) { path_name = job.get("path" + i); if( input_file.indexOf( path_name ) >= 0 ) { column_index = i; break; } } System.out.println("Column index = " + column_index + ", path_name=" + path_name); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in input file return; String[] line = line_text.split("[\t]"); int tab_pos = line_text.indexOf("\t"); String second_str = line_text.substring(tab_pos+1); if( second_str.charAt(0) == 'v' ) second_str = second_str.substring(1); else if( second_str.startsWith("bsf")) { int colon_pos = second_str.indexOf(":"); second_str = second_str.substring(colon_pos+1); } String value_str = "" + column_index + " " + second_str; output.collect( new IntWritable(Integer.parseInt(line[0])), new Text(value_str) ); } } public static class RedPass1 extends MapReduceBase implements Reducer { int number_tables = 0; int join_type = OuterJoin; String sep = "\t"; public void configure(JobConf job) { number_tables = Integer.parseInt(job.get("number_tables")); join_type = Integer.parseInt(job.get("join_type")); System.out.println("RedPass1 : configure is called. 
number_tables=" + number_tables );
		}

		public void reduce (final IntWritable key, final Iterator<Text> values, OutputCollector<Text, Text> output, final Reporter reporter) throws IOException
		{
			Map<Integer, String> val_map = new HashMap<Integer, String>();

			while (values.hasNext()) {
				String cur_string = values.next().toString();
				final String[] tokens = cur_string.split(" ");
				int column_index = Integer.parseInt(tokens[0]);
				val_map.put(column_index, tokens[1]);
			}

			String output_values = "" + key.get() ;//+ " ";
			int received_count = 0;
			for(int i = 1; i <= number_tables; i++) {
				String saved_val = val_map.get(i);
				if( i <= number_tables )	// always true: every column, present or missing, is preceded by a separator
					output_values += sep;
				if( saved_val != null ) {
					output_values += saved_val;
					received_count++;
				}
			}

			if( join_type == OuterJoin )
				output.collect(new Text(output_values), new Text("") );
			else if( received_count == number_tables )	// SemiJoin: emit only rows present in every input table
				output.collect(new Text(output_values), new Text("") );
		}
	}

	//////////////////////////////////////////////////////////////////////
	// command line interface
	//////////////////////////////////////////////////////////////////////
	protected Path output_path = null;
	ArrayList<Path> input_paths = new ArrayList<Path>();
	protected int nreducer = 10;
	protected int number_tables = 1;	// number of values to join
	int join_type = OuterJoin;

	// Main entry point.
	public static void main (final String[] args) throws Exception
	{
		final int result = ToolRunner.run(new Configuration(), new JoinTablePegasus(), args);
		System.exit(result);
	}

	// Print the command-line usage text.
	protected static int printUsage ()
	{
		System.out.println("JoinTablePegasus <# of reducers> <OuterJoin or SemiJoin> <output path> <input path 1> <input path 2> ...");
		ToolRunner.printGenericCommandUsage(System.out);
		return -1;
	}

	// submit the map/reduce job.
	public int run (final String[] args) throws Exception
	{
		if( args.length < 5 ) {
			return printUsage();
		}

		nreducer = Integer.parseInt(args[0]);
		if( args[1].startsWith("Semi") )
			join_type = SemiJoin;
		output_path = new Path(args[2]);
		number_tables = args.length - 3;
		System.out.println("Output path = " + args[2] + ", Nreducer =" + nreducer + ", number_tables=" + number_tables );
		if( join_type == OuterJoin )
			System.out.println("Join type = OuterJoin");
		else
			System.out.println("Join type = SemiJoin");

		for(int i = 3; i < args.length; i++) {
			System.out.println(" input path : " + args[i] );
			input_paths.add( new Path(args[i]) );
		}

		// run job
		JobClient.runJob(configPass1());

		System.out.println("Joined table is in HDFS " + args[2]);

		return 0;
	}

	// Configure pass1
	protected JobConf configPass1() throws Exception
	{
		final JobConf conf = new JobConf(getConf(), JoinTablePegasus.class);
		conf.set("number_tables", "" + number_tables);
		conf.set("join_type", "" + join_type);
		conf.setJobName("JoinTablePegasus");

		conf.setMapperClass(MapPass1.class);
		conf.setReducerClass(RedPass1.class);

		int i = 1;
		Iterator<Path> iter = input_paths.iterator();
		while( iter.hasNext() ){
			Path cur_path = iter.next();
			FileInputFormat.addInputPath(conf, cur_path);
			conf.set("path" + i, cur_path.toString() );
			i++;
		}
		FileOutputFormat.setOutputPath(conf, output_path);
		final FileSystem fs = FileSystem.get(conf);
		fs.delete(output_path);

		conf.setNumReduceTasks( nreducer );

		conf.setMapOutputKeyClass(IntWritable.class);
		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(Text.class);

		return conf;
	}
}

PEGASUS/src/pegasus/heigen/0000755000000000000000000000000011443145611014360 5ustar rootroot
PEGASUS/src/pegasus/heigen/NormalizeVector.java0000644000000000000000000000651211443145611020352 0ustar rootroot
/***********************************************************************
 PEGASUS: Peta-Scale Graph Mining System
 Authors: U Kang, Duen Horng Chau, and Christos Faloutsos

 This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

-------------------------------------------------------------------------
 File: NormalizeVector.java
 - Normalize a vector so that the sum of the elements is 1.
 Version: 2.0
***********************************************************************/

package pegasus;

import java.io.*;
import java.util.*;
import java.text.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

public class NormalizeVector extends Configured implements Tool
{
	//////////////////////////////////////////////////////////////////////
	// command line interface
	//////////////////////////////////////////////////////////////////////
	protected Path input_path = null;
	protected Path output_path = null;
	protected int nreducers = 1;

	// Main entry point.
	public static void main (final String[] args) throws Exception
	{
		final int result = ToolRunner.run(new Configuration(), new NormalizeVector(), args);
		System.exit(result);
	}

	// Print the command-line usage text.
	protected static int printUsage ()
	{
		System.out.println("NormalizeVector <input path> <output path> <# of reducers> <additional multiplier>");
		ToolRunner.printGenericCommandUsage(System.out);
		return -1;
	}

	// submit the map/reduce job.
	public int run (final String[] args) throws Exception
	{
		if( args.length != 4 ) {
			return printUsage();
		}
		int i;

		input_path = new Path(args[0]);
		output_path = new Path(args[1]);
		nreducers = Integer.parseInt(args[2]);
		double additional_multiplier = Double.parseDouble(args[3]);

		System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
		System.out.println("[PEGASUS] Normalizing a vector. input_path=" + args[0] + ", output_path=" + args[1] + "\n");

		final FileSystem fs = FileSystem.get(getConf());
		FileSystem lfs = FileSystem.getLocal(getConf());

		// compute l1 norm
		String[] new_args = new String[1];
		new_args[0] = args[0];
		ToolRunner.run(getConf(), new L1norm(), new_args);
		double scalar = PegasusUtils.read_l1norm_result(getConf());
		lfs.delete(new Path("l1norm"), true);
		System.out.println("L1norm = " + scalar );

		// multiply by scalar
		new_args = new String[2];
		new_args[0] = args[0];
		new_args[1] = new String("" + additional_multiplier/scalar);
		ToolRunner.run(getConf(), new ScalarMult(), new_args);
		fs.delete(output_path, true);
		fs.rename(new Path("smult_output"), output_path );

		System.out.println("\n[PEGASUS] Normalization completed. The normalized vector is saved in HDFS " + args[1] + ".\n");

		return 0;
	}
}

PEGASUS/src/pegasus/heigen/SaxpyBlock.java0000644000000000000000000001650411443145611017310 0ustar rootroot
/***********************************************************************
 PEGASUS: Peta-Scale Graph Mining System
 Authors: U Kang, Duen Horng Chau, and Christos Faloutsos

 This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: SaxpyBlock.java - Compute Saxpy operation which is to compute ax + b Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; // y = y + ax public class SaxpyBlock extends Configured implements Tool { ////////////////////////////////////////////////////////////////////// // STAGE 1: make initial pagerank vector ////////////////////////////////////////////////////////////////////// // MapStage1: public static class MapStage1 extends MapReduceBase implements Mapper { private final IntWritable from_node_int = new IntWritable(); private boolean isYpath = false; private boolean isXpath = false; private double a; protected int block_width = 16; public void configure(JobConf job) { String y_path = job.get("y_path"); String x_path = job.get("x_path"); a = Double.parseDouble(job.get("a")); block_width = Integer.parseInt(job.get("block_width")); String input_file = job.get("map.input.file"); if(input_file.contains(y_path)) isYpath = true; else if(input_file.contains(x_path)) isXpath = true; System.out.println("SaxpyBlock.MapStage1: map.input.file = " + input_file + ", isYpath=" + isYpath + ", isXpath=" + isXpath + ", a=" + a + ", block_width=" + block_width); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); int out_key = Integer.parseInt(line_text.substring(0, tabpos)); String val_str = line_text.substring(tabpos+1); char fc = val_str.charAt(0); if( fc == 's' || fc == 'v') val_str = val_str.substring(1); if( isYpath ) { output.collect( new IntWritable(out_key) , new Text(val_str) ); } else if( isXpath ) { double []xvec = MatvecUtils.decodeBlockVector(val_str, block_width) ; for(int i = 0; i < block_width; i++) { if( xvec[i] != 0 ) xvec[i] = xvec[i] * a; } String new_val_str = MatvecUtils.encodeBlockVector(xvec, block_width); output.collect( new IntWritable(out_key) , new Text( new_val_str ) ); } } } // RedStage1 public static class RedStage1 extends MapReduceBase implements Reducer { protected int block_width = 16; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); System.out.println("DotProductBlock:RedStage1 : configure is called. 
block_width=" + block_width);
		}

		public void reduce (final IntWritable key, final Iterator<Text> values, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException
		{
			// v1, v2: the (at most two) block vectors arriving for this key, one from y and one from a*x.
			double[] v1 = null, v2 = null;
			double[] result = new double[block_width];
			int input_index= 0;

			while (values.hasNext()) {
				String cur_value_str = values.next().toString();
				if(input_index == 0 ) {
					v1 = MatvecUtils.decodeBlockVector( cur_value_str, block_width );
					input_index++;
				} else
					v2 = MatvecUtils.decodeBlockVector( cur_value_str, block_width );
			}

			int i ;
			if( v1 != null && v2 != null ) {
				for(i = 0; i < block_width ; i++)
					result[i] = v1[i] + v2[i];
			} else if( v1 != null && v2 == null ) {
				for(i = 0; i < block_width ; i++)
					result[i] = v1[i];
			} else if( v1 == null && v2 != null ) {
				for(i = 0; i < block_width ; i++)
					result[i] = v2[i];
			} else {
				for(i = 0; i < block_width ; i++)
					result[i] = 0;
			}

			String new_val_str = MatvecUtils.encodeBlockVector(result, block_width);
			if( new_val_str.length() > 0 )
				output.collect( key , new Text( new_val_str ) );
		}
	}

	//////////////////////////////////////////////////////////////////////
	// command line interface
	//////////////////////////////////////////////////////////////////////
	protected int nreducers = 1;
	int block_width = 16;

	// Main entry point.
	public static void main (final String[] args) throws Exception
	{
		final int result = ToolRunner.run(new Configuration(), new SaxpyBlock(), args);
		System.exit(result);
	}

	// Print the command-line usage text.
	protected static int printUsage ()
	{
		System.out.println("SaxpyBlock <# of reducers> <y path> <x path> <a> <block width>");
		ToolRunner.printGenericCommandUsage(System.out);
		return -1;
	}

	// submit the map/reduce job.
	public int run (final String[] args) throws Exception
	{
		if( args.length != 5 ) {
			return printUsage();
		}
		int ret_val = 0;

		nreducers = Integer.parseInt(args[0]);
		Path y_path = new Path(args[1]);
		Path x_path = new Path(args[2]);
		double param_a = Double.parseDouble(args[3]);
		block_width = Integer.parseInt(args[4]);

		System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
		System.out.println("[PEGASUS] Computing SaxpyBlock. y_path=" + y_path.getName() + ", x_path=" + x_path.getName() + ", a=" + param_a + "\n");

		final FileSystem fs = FileSystem.get(getConf());

		Path saxpy_output = new Path("saxpy_output");
		if( y_path.getName().equals("saxpy_output") ) {
			System.out.println("saxpy(): output path name is same as the input path name: changing the output path name to saxpy_output1");
			saxpy_output = new Path("saxpy_output1");
			ret_val = 1;
		}
		fs.delete(saxpy_output);

		JobClient.runJob( configSaxpy(y_path, x_path, saxpy_output, param_a) );

		System.out.println("\n[PEGASUS] SaxpyBlock computed.
Output is saved in HDFS " + saxpy_output.getName() + "\n"); return ret_val; // return value : 1 (output path is saxpy_output1) // 0 (output path is saxpy_output) } // Configure SaxpyBlock protected JobConf configSaxpy (Path py, Path px, Path saxpy_output, double a) throws Exception { final JobConf conf = new JobConf(getConf(), SaxpyBlock.class); conf.set("y_path", py.getName()); conf.set("x_path", px.getName()); conf.set("a", "" + a); conf.set("block_width", "" + block_width); conf.setJobName("Lanczos.SaxpyBlock"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); FileInputFormat.setInputPaths(conf, py, px); FileOutputFormat.setOutputPath(conf, saxpy_output); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } } PEGASUS/src/pegasus/heigen/Saxpy.java0000644000000000000000000001412511443145611016332 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: Saxpy.java - Compute Saxpy operation which is to compute ax + b Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; // y = y + ax public class Saxpy extends Configured implements Tool { ////////////////////////////////////////////////////////////////////// // STAGE 1: make initial pagerank vector ////////////////////////////////////////////////////////////////////// // MapStage1: public static class MapStage1 extends MapReduceBase implements Mapper { private final IntWritable from_node_int = new IntWritable(); private boolean isYpath = false; private boolean isXpath = false; private double a; public void configure(JobConf job) { String y_path = job.get("y_path"); String x_path = job.get("x_path"); a = Double.parseDouble(job.get("a")); String input_file = job.get("map.input.file"); if(input_file.contains(y_path)) isYpath = true; else if(input_file.contains(x_path)) isXpath = true; System.out.println("Saxpy.MapStage1: map.input.file = " + input_file + ", isYpath=" + isYpath + ", isXpath=" + isXpath + ", a=" + a); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); int out_key = Integer.parseInt(line_text.substring(0, tabpos)); double out_val = 0; if( line_text.charAt(tabpos+1) == 'v') { out_val = Double.parseDouble(line_text.substring(tabpos+2)); } else { out_val = Double.parseDouble(line_text.substring(tabpos+1)); } if( isYpath ) { output.collect( new IntWritable(out_key) , new 
DoubleWritable(out_val) ); } else if( isXpath ) { output.collect( new IntWritable(out_key) , new DoubleWritable( a * out_val ) ); } } } // RedStage1 public static class RedStage1 extends MapReduceBase implements Reducer<IntWritable, DoubleWritable, IntWritable, DoubleWritable> { public void reduce (final IntWritable key, final Iterator<DoubleWritable> values, final OutputCollector<IntWritable, DoubleWritable> output, final Reporter reporter) throws IOException { int i = 0; double val_double[] = new double[2]; val_double[0] = 0; val_double[1] = 0; while (values.hasNext()) { val_double[i] = values.next().get(); i++; } double result = val_double[0] + val_double[1]; if( result != 0 ) output.collect(key, new DoubleWritable(result)); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected int nreducers = 1; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new Saxpy(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("Saxpy <# of reducers> <y_path> <x_path> <a>"); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. public int run (final String[] args) throws Exception { if( args.length != 4 ) { return printUsage(); } int ret_val = 0; nreducers = Integer.parseInt(args[0]); Path y_path = new Path(args[1]); Path x_path = new Path(args[2]); double param_a = Double.parseDouble(args[3]); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing Saxpy. y_path=" + y_path.getName() + ", x_path=" + x_path.getName() + ", a=" + param_a + "\n"); final FileSystem fs = FileSystem.get(getConf()); Path saxpy_output = new Path("saxpy_output"); if( y_path.getName().equals("saxpy_output") ) { System.out.println("saxpy(): output path name is same as the input path name: changing the output path name to saxpy_output1"); saxpy_output = new Path("saxpy_output1"); ret_val = 1; } fs.delete(saxpy_output); JobClient.runJob( configSaxpy(y_path, x_path, saxpy_output, param_a) ); System.out.println("\n[PEGASUS] Saxpy computed. Output is saved in HDFS " + saxpy_output.getName() + "\n"); return ret_val; // return value : 1 (output path is saxpy_output1) // 0 (output path is saxpy_output) } // Configure Saxpy protected JobConf configSaxpy (Path py, Path px, Path saxpy_output, double a) throws Exception { final JobConf conf = new JobConf(getConf(), Saxpy.class); conf.set("y_path", py.getName()); conf.set("x_path", px.getName()); conf.set("a", "" + a); conf.setJobName("Lanczos_pass_saxpy"); conf.setMapperClass(Saxpy.MapStage1.class); conf.setReducerClass(Saxpy.RedStage1.class); FileInputFormat.setInputPaths(conf, py, px); FileOutputFormat.setOutputPath(conf, saxpy_output); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(DoubleWritable.class); return conf; } } PEGASUS/src/pegasus/heigen/ScalarMult.java0000644000000000000000000001263511443145611017301 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: ScalarMult.java - Multiply a vector with a scalar. Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; // y = sy public class ScalarMult extends Configured implements Tool { ////////////////////////////////////////////////////////////////////// // STAGE 1: multiply each vector element by the scalar s ////////////////////////////////////////////////////////////////////// // MapStage1: public static class MapStage1Double extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, DoubleWritable> { private final IntWritable from_node_int = new IntWritable(); private boolean isYpath = false; private boolean isXpath = false; private double s; public void configure(JobConf job) { s = Double.parseDouble(job.get("s")); System.out.println("ScalarMult.MapStage1: s = " + s); } public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, DoubleWritable> output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); int out_key = Integer.parseInt(line_text.substring(0, tabpos)); double out_val = 0; if( line_text.charAt(tabpos+1) == 'v') { out_val = Double.parseDouble(line_text.substring(tabpos+2)); } else { out_val = Double.parseDouble(line_text.substring(tabpos+1)); } output.collect(new IntWritable(out_key), new DoubleWritable(s*out_val)); } } public static class MapStage1Text extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, Text> { private final IntWritable from_node_int = new IntWritable(); private boolean isYpath = false; private boolean isXpath = false; private double s; public void configure(JobConf job) { s = Double.parseDouble(job.get("s")); System.out.println("ScalarMult.MapStage1: s = " + s); } public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); int out_key = Integer.parseInt(line_text.substring(0, tabpos)); double out_val = 0; if( line_text.charAt(tabpos+1) == 'v') { out_val = Double.parseDouble(line_text.substring(tabpos+2)); } else { out_val = Double.parseDouble(line_text.substring(tabpos+1)); } output.collect(new IntWritable(out_key), new Text("v" + (s*out_val)) ); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected int nreducers = 1; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new ScalarMult(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("ScalarMult <in_path> <s>"); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. 
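// Illustration (added; not part of the original source): given a vector stored
// one element per line as "nodeid<TAB>v<value>", MapStage1Text with s = 2.0 turns
//   3    v1.5
// into
//   3    v3.0
// The job is map-only. Under the (hypothetical) assumption that the compiled
// classes live in pegasus.jar, it could be launched as
//   hadoop jar pegasus.jar pegasus.ScalarMult my_vector 2.0
// where my_vector is a placeholder HDFS path; results land in smult_output.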
public int run (final String[] args) throws Exception { if( args.length != 2 ) { return printUsage(); } Path in_path = new Path(args[0]); double s = Double.parseDouble(args[1]); final FileSystem fs = FileSystem.get(getConf()); Path smult_output = new Path("smult_output"); fs.delete(smult_output); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing ScalarMult. In_path=" + in_path.getName() + ", s=" + s + "\n"); JobClient.runJob( configScalarMult(in_path, smult_output, s) ); System.out.println("\n[PEGASUS] ScalarMult computed. Output is saved in HDFS " + smult_output.getName() + "\n"); return 0; } // Configure ScalarMult protected JobConf configScalarMult (Path in_path, Path smult_output, double s) throws Exception { final JobConf conf = new JobConf(getConf(), ScalarMult.class); conf.set("s", "" + s); conf.setJobName("ScalarMult"); conf.setMapperClass(ScalarMult.MapStage1Text.class); FileInputFormat.setInputPaths(conf, in_path); FileOutputFormat.setOutputPath(conf, smult_output); conf.setNumReduceTasks( 0 ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class);//conf.setOutputValueClass(DoubleWritable.class); return conf; } } PEGASUS/src/pegasus/heigen/SaxpyTextoutput.java0000644000000000000000000001435311443145611020463 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
------------------------------------------------------------------------- File: SaxpyTextoutput.java - Compute the Saxpy operation, y = y + ax, with text output Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; // y = y + ax public class SaxpyTextoutput extends Configured implements Tool { ////////////////////////////////////////////////////////////////////// // STAGE 1: read y and x, emitting y elements as-is and x elements scaled by a ////////////////////////////////////////////////////////////////////// // MapStage1: public static class MapStage1 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, DoubleWritable> { private final IntWritable from_node_int = new IntWritable(); private boolean isYpath = false; private boolean isXpath = false; private double a; public void configure(JobConf job) { String y_path = job.get("y_path"); String x_path = job.get("x_path"); a = Double.parseDouble(job.get("a")); String input_file = job.get("map.input.file"); if(input_file.contains(y_path)) isYpath = true; else if(input_file.contains(x_path)) isXpath = true; System.out.println("SaxpyTextoutput.MapStage1: map.input.file = " + input_file + ", isYpath=" + isYpath + ", isXpath=" + isXpath + ", a=" + a); } public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, DoubleWritable> output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); int out_key = Integer.parseInt(line_text.substring(0, tabpos)); double out_val = 0; if( line_text.charAt(tabpos+1) == 'v') { out_val = Double.parseDouble(line_text.substring(tabpos+2)); } else { out_val = Double.parseDouble(line_text.substring(tabpos+1)); } if( isYpath ) { output.collect( new IntWritable(out_key) , new DoubleWritable(out_val) ); } else if( isXpath ) { output.collect( new IntWritable(out_key) , new DoubleWritable( a * out_val ) ); } } } // RedStage1 public static class RedStage1 extends MapReduceBase implements Reducer<IntWritable, DoubleWritable, IntWritable, Text> { public void reduce (final IntWritable key, final Iterator<DoubleWritable> values, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException { int i = 0; double val_double[] = new double[2]; val_double[0] = 0; val_double[1] = 0; while (values.hasNext()) { val_double[i] = values.next().get(); i++; } double result = val_double[0] + val_double[1]; if( result != 0 ) output.collect(key, new Text("v" + result)); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected int nreducers = 1; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new SaxpyTextoutput(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("SaxpyTextoutput <# of reducers> <y_path> <x_path> <a>"); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. 
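// Worked example (added for illustration; values are hypothetical): with
// a = -2.0, a y-file line "7<TAB>v3.0" maps to (7, 3.0) and an x-file line
// "7<TAB>v0.5" maps to (7, -2.0 * 0.5) = (7, -1.0); RedStage1 then sums the
// two partial values and emits "7<TAB>v2.0". A sum of exactly 0 is
// suppressed, so the output vector stays sparse.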
public int run (final String[] args) throws Exception { if( args.length != 4 ) { return printUsage(); } int ret_val = 0; nreducers = Integer.parseInt(args[0]); Path y_path = new Path(args[1]); Path x_path = new Path(args[2]); double param_a = Double.parseDouble(args[3]); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing SaxpyTextoutput. y_path=" + y_path.getName() + ", x_path=" + x_path.getName() + ", a=" + param_a + "\n"); final FileSystem fs = FileSystem.get(getConf()); Path saxpy_output = new Path("saxpy_output"); if( y_path.getName().equals("saxpy_output") ) { System.out.println("saxpy(): output path name is same as the input path name: changing the output path name to saxpy_output1"); saxpy_output = new Path("saxpy_output1"); ret_val = 1; } fs.delete(saxpy_output); JobClient.runJob( configSaxpyTextoutput(y_path, x_path, saxpy_output, param_a) ); System.out.println("\n[PEGASUS] SaxpyTextoutput computed. Output is saved in HDFS " + saxpy_output.getName() + "\n"); return ret_val; // return value : 1 (output path is saxpy_output1) // 0 (output path is saxpy_output) } // Configure SaxpyTextoutput protected JobConf configSaxpyTextoutput (Path py, Path px, Path saxpy_output, double a) throws Exception { final JobConf conf = new JobConf(getConf(), SaxpyTextoutput.class); conf.set("y_path", py.getName()); conf.set("x_path", px.getName()); conf.set("a", "" + a); conf.setJobName("SaxpyTextoutput"); conf.setMapperClass(SaxpyTextoutput.MapStage1.class); conf.setReducerClass(SaxpyTextoutput.RedStage1.class); FileInputFormat.setInputPaths(conf, py, px); FileOutputFormat.setOutputPath(conf, saxpy_output); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setMapOutputValueClass(DoubleWritable.class); conf.setOutputValueClass(Text.class); return conf; } } PEGASUS/src/pegasus/heigen/L1normBlock.java0000644000000000000000000001012011443145611017340 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: L1normBlock.java - Compute L1 norm with block based method. 
Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class L1normBlock extends Configured implements Tool { public static class MapStage1 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, DoubleWritable> { protected int block_width = 16; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); } public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, DoubleWritable> output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); String val_str = line_text.substring(tabpos+1); double []xvec = MatvecUtils.decodeBlockVector(val_str, block_width) ; double sum = 0; for(int i = 0; i < block_width; i++) { if( xvec[i] != 0 ) sum += Math.abs(xvec[i]); } output.collect( new IntWritable(0) , new DoubleWritable( sum ) ); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected int nreducers = 1; int block_width = 16; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new L1normBlock(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("L1normBlock <in_path> <block_width>"); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. public int run (final String[] args) throws Exception { if( args.length != 2 ) { return printUsage(); } Path in_path = new Path(args[0]); block_width = Integer.parseInt(args[1]); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing L1normBlock. in_path=" + in_path.getName() + "\n"); final FileSystem fs = FileSystem.get(getConf()); Path l1norm_output = new Path("l1norm_output"); fs.delete(l1norm_output); JobClient.runJob(configL1norm(in_path, l1norm_output)); System.out.println("\n[PEGASUS] L1norm computed. Output is saved in HDFS " + l1norm_output.getName() + "\n"); return 0; } // Configure L1 norm protected JobConf configL1norm (Path in_path, Path out_path) throws Exception { final JobConf conf = new JobConf(getConf(), L1normBlock.class); conf.set("block_width", "" + block_width); conf.setJobName("L1normBlock"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(PegasusUtils.RedSumDouble.class); conf.setCombinerClass(PegasusUtils.RedSumDouble.class); FileInputFormat.setInputPaths(conf, in_path); FileOutputFormat.setOutputPath(conf, out_path); conf.setNumReduceTasks( 1 ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(DoubleWritable.class); return conf; } } PEGASUS/src/pegasus/heigen/L1norm.java0000644000000000000000000000755311443145611016405 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: L1norm.java - Compute the L1 norm of a vector. Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; // L1 norm: the sum of the absolute values of the vector elements public class L1norm extends Configured implements Tool { public static class MapStage1 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, DoubleWritable> { public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, DoubleWritable> output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); double raw_val = 0; if( tabpos > 0 ) { if( line_text.charAt(tabpos+1) == 'v') { raw_val = Math.abs(Double.parseDouble(line_text.substring(tabpos+2))); } else { raw_val = Math.abs(Double.parseDouble(line_text.substring(tabpos+1))); } } else { raw_val = 1.0; } output.collect(new IntWritable(0), new DoubleWritable(Math.abs(raw_val))); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected int nreducers = 1; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new L1norm(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("L1norm <in_path>"); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. public int run (final String[] args) throws Exception { if( args.length != 1 ) { return printUsage(); } Path in_path = new Path(args[0]); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing L1norm. in_path=" + in_path.getName() + "\n"); final FileSystem fs = FileSystem.get(getConf()); Path l1norm_output = new Path("l1norm_output"); fs.delete(l1norm_output); JobClient.runJob(configL1norm(in_path, l1norm_output)); System.out.println("\n[PEGASUS] L1norm computed. 
Output is saved in HDFS " + l1norm_output.getName() + "\n"); return 0; } // Configure L1 norm protected JobConf configL1norm (Path in_path, Path out_path) throws Exception { final JobConf conf = new JobConf(getConf(), L1norm.class); conf.setJobName("L1norm"); conf.setMapperClass(L1norm.MapStage1.class); conf.setReducerClass(PegasusUtils.RedSumDouble.class); conf.setCombinerClass(PegasusUtils.RedSumDouble.class); FileInputFormat.setInputPaths(conf, in_path); FileOutputFormat.setOutputPath(conf, out_path); conf.setNumReduceTasks( 1 ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(DoubleWritable.class); return conf; } } PEGASUS/src/pegasus/con_cmpth/0000755000000000000000000000000011443145611015073 5ustar rootrootPEGASUS/src/pegasus/con_cmpth/ConCmptIVGen.java0000644000000000000000000001502711443145611020177 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: ConCmptIVGen.java - generate initial vectors for HCC Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class ConCmptIVGen extends Configured implements Tool { ////////////////////////////////////////////////////////////////////// // STAGE 1: Read init vector generation command, and generate the init vector. 
// - Input: init vector generation command // - Output: nodeid TAB initial_component_vector ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, Text> { public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comment lines in the command file return; final String[] line = line_text.split("\t"); if(line.length < 3) return; output.collect( new IntWritable(Integer.parseInt(line[0])), new Text(line[1] + "\t" + line[2]) ); } } public static class RedStage1 extends MapReduceBase implements Reducer<IntWritable, Text, IntWritable, Text> { int number_nodes = 0; public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); System.out.println("RedStage1: number_nodes = " + number_nodes ); } public void reduce (final IntWritable key, final Iterator<Text> values, OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException { int start_node, end_node; while (values.hasNext()) { Text cur_text = values.next(); final String[] line = cur_text.toString().split("\t"); start_node = Integer.parseInt(line[0]); end_node = Integer.parseInt(line[1]); for(int i = start_node; i <= end_node; i++) { output.collect( new IntWritable(i), new Text("v" + i) ); } } } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path input_path = null; protected Path output_path = null; protected int number_nodes = 0; protected int number_reducers = 1; FileSystem fs ; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new ConCmptIVGen(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("ConCmptIVGen <output_path> <# of nodes> <# of machines>"); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. public int run (final String[] args) throws Exception { if( args.length != 3 ) { return printUsage(); } input_path = new Path("cc_ivcmd"); output_path = new Path(args[0]); number_nodes = Integer.parseInt(args[1]); number_reducers = Integer.parseInt(args[2]); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Generating initial vector. Output path = " + args[0] + ", Number of nodes = " + number_nodes + ", Number of machines = " + number_reducers + "\n"); // Generate command file and copy to HDFS "input_ConCmptIVGen" gen_cmd_file(number_nodes, number_reducers, input_path); // run job JobClient.runJob(configStage1()); fs = FileSystem.get(getConf()); fs.delete(input_path); System.out.println("\n[PEGASUS] Initial connected component vector generated in HDFS " + args[0] + "\n"); return 0; } // generate the initial-vector command file which is used in the 1st iteration. 
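// Worked example (added for illustration, following the arithmetic of
// gen_cmd_file below): with number_nodes = 100 and num_reducers = 4,
// step = 100/4 = 25 and the generated command file contains
//   0    0     24
//   1    25    49
//   2    50    74
//   3    75    99
// Reducer i then expands its [start_node, end_node] range into initial
// vector entries "n<TAB>vn", one per node n.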
public void gen_cmd_file(int num_nodes, int num_reducers, Path input_path) throws IOException { // generate a temporary local command file int i; String file_name = "component_iv.temp"; FileWriter file = new FileWriter(file_name); BufferedWriter out = new BufferedWriter (file); out.write("# component vector file from ConCmptIVGen\n"); out.write("# number of nodes in graph = " + number_nodes + "\n"); System.out.print("creating initial vector generation cmd..."); int step = num_nodes/num_reducers; int start_node, end_node; for(i=0; i < num_reducers; i++) { start_node = i * step; if( i < num_reducers-1) end_node = step*(i+1) - 1; else end_node = num_nodes - 1; out.write(i + "\t" + start_node + "\t" + end_node + "\n"); } out.close(); System.out.println("done."); // copy it to input_path, and delete the temporary local file. final FileSystem fs = FileSystem.get(getConf()); fs.copyFromLocalFile( true, new Path("./" + file_name), new Path (input_path.toString()+ "/" + file_name) ); } // Configure pass1 protected JobConf configStage1() throws Exception { final JobConf conf = new JobConf(getConf(), ConCmptIVGen.class); conf.set("number_nodes", "" + number_nodes); conf.setJobName("ConCmptIVGen_Stage1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); FileInputFormat.setInputPaths(conf, input_path); FileOutputFormat.setOutputPath(conf, output_path); conf.setNumReduceTasks( number_reducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } } PEGASUS/src/pegasus/con_cmpth/ConCmpt.java0000644000000000000000000005044611443145611017310 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
------------------------------------------------------------------------- File: ConCmpt.java - HCC: Find Connected Components of graph Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; class ResultInfo { public int changed; public int unchanged; }; public class ConCmpt extends Configured implements Tool { public static int MAX_ITERATIONS = 2048; public static int changed_nodes[] = new int[MAX_ITERATIONS]; public static int unchanged_nodes[] = new int[MAX_ITERATIONS]; static int iter_counter = 0; ////////////////////////////////////////////////////////////////////// // STAGE 1: join matrix elements and vector elements using matrix.dst_id and vector.row_id ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, Text> { private final IntWritable from_node_int = new IntWritable(); private final IntWritable to_node_int = new IntWritable(); int make_symmetric = 0; public void configure(JobConf job) { make_symmetric = Integer.parseInt(job.get("make_symmetric")); System.out.println("MapStage1 : make_symmetric = " + make_symmetric); } public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in the edge file return; final String[] line = line_text.split("\t"); if( line.length < 2 ) return; if( line[1].startsWith("m") ) { // input sample: 11 msu5 from_node_int.set(Integer.parseInt(line[0])); output.collect(from_node_int, new Text(line[1])); } else { // (src, dst) edge to_node_int.set(Integer.parseInt(line[1])); output.collect(to_node_int, new Text(line[0])); // invert dst and src if( make_symmetric == 1 ) { // make inverse edges from_node_int.set(Integer.parseInt(line[0])); if( to_node_int.get() != from_node_int.get() ) output.collect(from_node_int, new Text(line[1])); } } } } public static class RedStage1 extends MapReduceBase implements Reducer<IntWritable, Text, IntWritable, Text> { int number_nodes = 0; public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); System.out.println("RedStage1 : configure is called. number_nodes = " + number_nodes ); } public void reduce (final IntWritable key, final Iterator<Text> values, OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException { String component_id_str = ""; Set<Integer> from_nodes_set = new HashSet<Integer>(); boolean self_contained = false; String line=""; while (values.hasNext()) { Text from_cur_node = values.next(); line = from_cur_node.toString(); if (line.startsWith("m")) { // component info if( component_id_str.length() == 0 ) component_id_str = line.substring(3); } else { // edge line int from_node_int = Integer.parseInt(line); from_nodes_set.add( from_node_int ); if( key.get() == from_node_int) self_contained = true; } } if( self_contained == false ) // add self loop, if not exists. 
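// Illustration (added; values are hypothetical): for key 5 whose incoming
// values were the component line "msi5" and the edge sources {3, 6}, the
// set below becomes {3, 5, 6}. Node 5 then receives "msi5" (its own
// candidate component id) while nodes 3 and 6 each receive "moi5".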
from_nodes_set.add(key.get()); Iterator from_nodes_it = from_nodes_set.iterator(); while (from_nodes_it.hasNext()) { String component_info; int cur_key_int = ((Integer)from_nodes_it.next()).intValue(); if( cur_key_int == key.get() ) { component_info = "msi" + component_id_str; output.collect(new IntWritable(cur_key_int), new Text(component_info)); } else { component_info = "moi" + component_id_str; output.collect(new IntWritable(cur_key_int), new Text(component_info)); } } } } //////////////////////////////////////////////////////////////////////////////////////////////// // STAGE 2: merge partial component ids. // - Input: partial component ids // - Output: combined component ids //////////////////////////////////////////////////////////////////////////////////////////////// public static class MapStage2 extends MapReduceBase implements Mapper { // Identity mapper public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); output.collect(new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) ); } } public static class RedStage2 extends MapReduceBase implements Reducer { public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int i; String out_val ="ms"; boolean bSelfChanged = false; char changed_prefix = 'x'; String complete_cistring = ""; int cur_min_nodeid = -1; int self_min_nodeid = -1; while (values.hasNext()) { String cur_ci_string = values.next().toString(); int cur_nodeid = -1; try { cur_nodeid = Integer.parseInt( cur_ci_string.substring(3) ); } catch (Exception ex) { System.out.println("Exception! cur_ci_string=[" + cur_ci_string + "]"); } if( cur_ci_string.charAt(1) == 's' ) { // for calculating individual diameter self_min_nodeid = cur_nodeid; } if( cur_min_nodeid == -1 ) { cur_min_nodeid = cur_nodeid; } else { if( cur_nodeid < cur_min_nodeid ) cur_min_nodeid = cur_nodeid; } } if( self_min_nodeid == cur_min_nodeid ) { changed_prefix = 'f'; // unchanged } else changed_prefix = 'i'; // changed out_val = out_val + changed_prefix + Integer.toString(cur_min_nodeid); output.collect(key, new Text( out_val ) ); } } public static class CombinerStage2 extends MapReduceBase implements Reducer { public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { String out_val ="moi"; int cur_min_nodeid = -1; while (values.hasNext()) { Text cur_value_text = values.next(); String cur_ci_string = cur_value_text.toString(); int cur_nodeid = -1; try { cur_nodeid = Integer.parseInt( cur_ci_string.substring(3) ); } catch (Exception ex) { System.out.println("Exception! cur_ci_string=[" + cur_ci_string + "]"); } if( cur_ci_string.charAt(1) == 's' ) { // for calculating individual diameter output.collect(key, new Text(cur_value_text) ); continue; } if( cur_min_nodeid == -1 ) { cur_min_nodeid = cur_nodeid; } else { if( cur_nodeid < cur_min_nodeid ) cur_min_nodeid = cur_nodeid; } } if( cur_min_nodeid != -1 ) { out_val += Integer.toString(cur_min_nodeid); output.collect(key, new Text( out_val ) ); } } } ////////////////////////////////////////////////////////////////////// // STAGE 3: Calculate number of nodes whose component id changed/unchanged. 
// - Input: current component ids // - Output: number_of_changed_nodes ////////////////////////////////////////////////////////////////////// public static class MapStage3 extends MapReduceBase implements Mapper<LongWritable, Text, Text, Text> { // output : f n ( n : # of node whose component didn't change) // i m ( m : # of node whose component changed) public void map (final LongWritable key, final Text value, final OutputCollector<Text, Text> output, final Reporter reporter) throws IOException { if (value.toString().startsWith("#")) return; final String[] line = value.toString().split("\t"); char change_prefix = line[1].charAt(2); output.collect(new Text(Character.toString(change_prefix)), new Text(Integer.toString(1)) ); } } public static class RedStage3 extends MapReduceBase implements Reducer<Text, Text, Text, Text> { public void reduce (final Text key, final Iterator<Text> values, final OutputCollector<Text, Text> output, final Reporter reporter) throws IOException { int sum = 0; while (values.hasNext()) { final String line = values.next().toString(); int cur_value = Integer.parseInt(line); sum += cur_value; } output.collect(key, new Text(Integer.toString(sum)) ); } } ////////////////////////////////////////////////////////////////////// // STAGE 4 : Summarize connected component information // input : concmpt_curbm // output : concmpt_summaryout // min_node_id, number_of_nodes_in_the_component ////////////////////////////////////////////////////////////////////// public static class MapStage4 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, IntWritable> { public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, IntWritable> output, final Reporter reporter) throws IOException { String line_text = value.toString(); final String[] line = line_text.split("\t"); output.collect( new IntWritable(Integer.parseInt(line[1].substring(3))), new IntWritable(1) ); } } public static class RedStage4 extends MapReduceBase implements Reducer<IntWritable, IntWritable, IntWritable, IntWritable> { public void reduce (final IntWritable key, final Iterator<IntWritable> values, OutputCollector<IntWritable, IntWritable> output, final Reporter reporter) throws IOException { int count = 0; while (values.hasNext()) { int cur_count = values.next().get(); count += cur_count; } output.collect(key, new IntWritable(count) ); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path edge_path = null; protected Path curbm_path = null; protected Path tempbm_path = null; protected Path nextbm_path = null; protected Path output_path = null; protected Path summaryout_path = null; protected String local_output_path; protected int number_nodes = 0; protected int nreducers = 1; protected int cur_iter = 1; protected int start_from_newbm = 0; protected int make_symmetric = 0; // convert directed graph to undirected graph // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new ConCmpt(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("concmpt <edge_path> <curbm_path> <tempbm_path> <nextbm_path> <output_path> <# of nodes> <# of tasks> <new or contNN> <makesym or nosym>"); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. 
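// Invocation sketch (added; the jar name and HDFS paths are hypothetical,
// and the argument order follows run() below):
//   hadoop jar pegasus.jar pegasus.ConCmpt cc_edge cc_curbm cc_tempbm \
//     cc_nextbm cc_output 16 4 new makesym
// i.e. find the components of a 16-node edge file with 4 reducers, starting
// from a freshly generated component vector and symmetrizing directed edges.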
public int run (final String[] args) throws Exception { if( args.length != 9 ) { return printUsage(); } edge_path = new Path(args[0]); curbm_path = new Path(args[1]); tempbm_path = new Path(args[2]); nextbm_path = new Path(args[3]); output_path = new Path(args[4]); summaryout_path = new Path("concmpt_summaryout"); number_nodes = Integer.parseInt(args[5]); nreducers = Integer.parseInt(args[6]); if( args[7].compareTo("new") == 0 ) start_from_newbm = 1; else { // args[7] == contNN e.g.) cont10 start_from_newbm = 0; cur_iter = Integer.parseInt(args[7].substring(4)); System.out.println("Starting from cur_iter = " + cur_iter); } if( args[8].compareTo("makesym") == 0 ) make_symmetric = 1; else make_symmetric = 0; System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing connected component. Edge path = " + args[0] + ", Newbm = " + args[7] + ", Reducers = " + nreducers ); local_output_path = args[4] + "_temp"; if( start_from_newbm == 1 ) { System.out.print("Generating initial component vector for " + number_nodes + " nodes "); // create bitmask generate command file, and copy to curbm_path gen_component_vector_file(number_nodes, curbm_path); System.out.println(" done"); } else { System.out.println("Resuming from current component vector at radius(" + cur_iter + ")"); } // Iteratively calculate neighborhood function. for (int i = cur_iter; i < MAX_ITERATIONS; i++) { cur_iter++; JobClient.runJob(configStage1()); JobClient.runJob(configStage2()); JobClient.runJob(configStage3()); FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path)); final FileSystem fs = FileSystem.get(getConf()); // copy neighborhood information from HDFS to local disk, and read it! String new_path = local_output_path + "/" + i; fs.copyToLocalFile(output_path, new Path(new_path) ) ; ResultInfo ri = readIterationOutput(new_path); changed_nodes[iter_counter] = ri.changed; unchanged_nodes[iter_counter] = ri.unchanged; iter_counter++; System.out.println("Hop " + i + " : changed = " + ri.changed + ", unchanged = " + ri.unchanged); // Stop when the minimum neighborhood doesn't change if( ri.changed == 0 ) { System.out.println("All the component ids converged. Finishing..."); fs.delete(curbm_path); fs.delete(tempbm_path); fs.delete(output_path); fs.rename(nextbm_path, curbm_path); break; } // rotate directory fs.delete(curbm_path); fs.delete(tempbm_path); fs.delete(output_path); fs.rename(nextbm_path, curbm_path); } FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path)); // calculate summary information using an additional stage System.out.println("Summarizing connected components information..."); JobClient.runJob(configStage4()); // finishing. 
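// Example of the final outputs (added for illustration; the numbers are
// made up): a concmpt_curbm line "8<TAB>msf0" says node 8 ended in the
// component whose minimum node id is 0, and a concmpt_summaryout line
// "0<TAB>1500" says that component contains 1500 nodes.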
System.out.println("\n[PEGASUS] Connected component computed."); System.out.println("[PEGASUS] Total Iteration = " + iter_counter); System.out.println("[PEGASUS] Connected component information is saved in the HDFS concmpt_curbm as\n\"node_id 'msf'component_id\" format"); System.out.println("[PEGASUS] Connected component distribution is saved in the HDFS concmpt_summaryout as\n\"component_id number_of_nodes\" format.\n"); return 0; } // generate component vector creation command public void gen_component_vector_file(int number_nodes, Path curbm_path) throws IOException { int start_pos = 0; int i; int max_filesize = 10000000; for(i=0; i < number_nodes; i+=max_filesize) { int len=max_filesize; if(len > number_nodes-i) len = number_nodes - i; gen_one_file(number_nodes, i, len, curbm_path); } } // generate component vector creation command public void gen_one_file(int number_nodes, int start_pos,int len, Path curbm_path) throws IOException { // generate a temporary local bitmask command file int i, j = 0, threshold = 0, count=0; String file_name = "component_vector.temp."+start_pos; FileWriter file = new FileWriter(file_name); BufferedWriter out = new BufferedWriter (file); out.write("# component vector file - hadoop\n"); out.write("# number of nodes in graph = " + number_nodes+", start_pos="+start_pos+"\n"); System.out.println("creating bitmask generation cmd for node " + start_pos + " ~ " + (start_pos+len)); for(i=0; i < number_nodes; i++) { int cur_nodeid = start_pos + i; out.write(cur_nodeid + "\tmsi" + cur_nodeid + "\n"); if(++j > len/10) { System.out.print("."); j = 0; } if(++count >= len) break; } out.close(); System.out.println(""); // copy it to curbm_path, and delete temporary local file. final FileSystem fs = FileSystem.get(getConf()); fs.copyFromLocalFile( true, new Path("./" + file_name), new Path (curbm_path.toString()+ "/" + file_name) ); } // read neighborhood number after each iteration. public static ResultInfo readIterationOutput(String new_path) throws Exception { ResultInfo ri = new ResultInfo(); ri.changed = ri.unchanged = 0; String output_path = new_path + "/part-00000"; String file_line = ""; try { BufferedReader in = new BufferedReader( new InputStreamReader(new FileInputStream( output_path ), "UTF8")); // Read first line file_line = in.readLine(); // Read through file one line at time. 
Print line # and line while (file_line != null){ final String[] line = file_line.split("\t"); if(line[0].startsWith("i")) ri.changed = Integer.parseInt( line[1] ); else // line[0].startsWith("u") ri.unchanged = Integer.parseInt( line[1] ); file_line = in.readLine(); } in.close(); } catch (IOException e) { e.printStackTrace(); } return ri;//result; } // Configure stage1 protected JobConf configStage1() throws Exception { final JobConf conf = new JobConf(getConf(), ConCmpt.class); conf.set("number_nodes", "" + number_nodes); conf.set("cur_iter", "" + cur_iter); conf.set("make_symmetric", "" + make_symmetric); conf.setJobName("ConCmpt_Stage1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); FileInputFormat.setInputPaths(conf, edge_path, curbm_path); FileOutputFormat.setOutputPath(conf, tempbm_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure stage2 protected JobConf configStage2 () throws Exception { final JobConf conf = new JobConf(getConf(), ConCmpt.class); conf.set("number_nodes", "" + number_nodes); conf.set("cur_iter", "" + cur_iter); conf.set("make_symmetric", "" + make_symmetric); conf.setJobName("ConCmpt_Stage2"); conf.setMapperClass(MapStage2.class); conf.setReducerClass(RedStage2.class); conf.setCombinerClass(CombinerStage2.class); FileInputFormat.setInputPaths(conf, tempbm_path); FileOutputFormat.setOutputPath(conf, nextbm_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure stage3 protected JobConf configStage3 () throws Exception { final JobConf conf = new JobConf(getConf(), ConCmpt.class); conf.set("number_nodes", "" + number_nodes); conf.setJobName("ConCmpt_Stage3"); conf.setMapperClass(MapStage3.class); conf.setReducerClass(RedStage3.class); conf.setCombinerClass(RedStage3.class); FileInputFormat.setInputPaths(conf, nextbm_path); FileOutputFormat.setOutputPath(conf, output_path); conf.setNumReduceTasks( 1 ); // This is necessary. conf.setOutputKeyClass(Text.class); conf.setOutputValueClass(Text.class); return conf; } // Configure stage4 protected JobConf configStage4() throws Exception { final JobConf conf = new JobConf(getConf(), ConCmpt.class); conf.set("number_nodes", "" + number_nodes); conf.set("cur_iter", "" + cur_iter); conf.set("make_symmetric", "" + make_symmetric); conf.setJobName("ConCmpt_Stage4"); conf.setMapperClass(MapStage4.class); conf.setReducerClass(RedStage4.class); conf.setCombinerClass(RedStage4.class); FileInputFormat.setInputPaths(conf, curbm_path); FileOutputFormat.setOutputPath(conf, summaryout_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(IntWritable.class); return conf; } } PEGASUS/src/pegasus/con_cmpth/ConCmptBlock.java0000644000000000000000000005171111443145611020261 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: ConCmptBlock.java - HCC: Find Connected Components of graph using block multiplication. This is a block-based version of HCC. Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class ConCmptBlock extends Configured implements Tool { public static int MAX_ITERATIONS = 1024; public static int changed_nodes[] = new int[MAX_ITERATIONS]; public static int unchanged_nodes[] = new int[MAX_ITERATIONS]; static int iter_counter = 0; ////////////////////////////////////////////////////////////////////// // STAGE 1: generate partial block-component ids. // Hash-join edge and vector by Vector.BLOCKROWID == Edge.BLOCKCOLID where // vector: key=BLOCKID, value= msu (IN-BLOCK-INDEX VALUE)s // moc // edge: key=BLOCK-ROW BLOCK-COL, value=(IN-BLOCK-ROW IN-BLOCK-COL VALUE)s // - Input: edge_file, component_ids_from_the_last_iteration // - Output: partial component ids ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); if( line.length < 2 ) return; if( line.length == 2 ) { // vector. component information. 
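// Illustration (added; sample records are hypothetical): after the tab-split,
// a block-vector line such as "2<TAB>msi0 5 1 7" has two fields and is
// emitted here keyed by its block id, while a block-edge line such as
// "0<TAB>2<TAB>0 1 1" has three fields and falls through to the else branch,
// keyed by its block-column id (line[1]).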
output.collect( new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) ); } else { // edge output.collect( new IntWritable(Integer.parseInt(line[1])), new Text(line[0] + "\t" + line[2]) ); } } } public static class RedStage1 extends MapReduceBase implements Reducer<IntWritable, Text, IntWritable, Text> { protected int block_width; protected int recursive_diagmult; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); recursive_diagmult = Integer.parseInt(job.get("recursive_diagmult")); System.out.println("RedStage1: block_width=" + block_width + ", recursive_diagmult=" + recursive_diagmult); } public void reduce (final IntWritable key, final Iterator<Text> values, OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException { ArrayList<VectorElem<Integer>> vectorArr = null; // save vector ArrayList<ArrayList<BlockElem<Integer>>> blockArr = new ArrayList<ArrayList<BlockElem<Integer>>>(); // save blocks ArrayList<Integer> blockRowArr = new ArrayList<Integer>(); // save block rows(integer) while (values.hasNext()) { // vector: key=BLOCKID, value= (IN-BLOCK-INDEX VALUE)s // edge: key=BLOCK-COLID BLOCK-ROWID, value=(IN-BLOCK-COL IN-BLOCK-ROW VALUE)s String line_text = values.next().toString(); final String[] line = line_text.split("\t"); if( line.length == 1 ) { // vector : VALUE vectorArr = GIMV.parseVectorVal(line_text.substring(3), Integer.class); } else { // edge : BLOCK-ROWID VALUE blockArr.add( GIMV.parseBlockVal(line[1], Integer.class) ); int block_row = Integer.parseInt(line[0]); blockRowArr.add( block_row ); } } int blockCount = blockArr.size(); if( vectorArr == null)// || blockCount == 0 ) // missing vector or block. return; // output 'self' block to check convergence output.collect(key, GIMV.formatVectorElemOutput("msi", vectorArr) ); // For every matrix block, join it with vector and output partial results Iterator<ArrayList<BlockElem<Integer>>> blockArrIter = blockArr.iterator(); Iterator<Integer> blockRowIter = blockRowArr.iterator(); while( blockArrIter.hasNext() ){ ArrayList<BlockElem<Integer>> cur_block = blockArrIter.next(); int cur_block_row = blockRowIter.next(); ArrayList<VectorElem<Integer>> cur_mult_result = null; if( key.get() == cur_block_row && recursive_diagmult == 1 ) { // do recursive multiplication ArrayList<VectorElem<Integer>> tempVectorArr = vectorArr; for(int i = 0; i < block_width; i++) { cur_mult_result = GIMV.minBlockVector( cur_block, tempVectorArr, block_width, 1); if( cur_mult_result == null || GIMV.compareVectors( tempVectorArr, cur_mult_result ) == 0 ) break; tempVectorArr = cur_mult_result; } } else { cur_mult_result = GIMV.minBlockVector( cur_block, vectorArr, block_width, 0); } Text output_vector = GIMV.formatVectorElemOutput("moi", cur_mult_result); if(output_vector.toString().length() > 0 ) output.collect(new IntWritable(cur_block_row), output_vector ); } } } //////////////////////////////////////////////////////////////////////////////////////////////// // STAGE 2: merge partial component ids. 
// - Input: partial component ids // - Output: combined component ids //////////////////////////////////////////////////////////////////////////////////////////////// public static class MapStage2 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, Text> { // Identity mapper public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); output.collect(new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) ); } } public static class RedStage2 extends MapReduceBase implements Reducer<IntWritable, Text, IntWritable, Text> { protected int block_width; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); System.out.println("RedStage2: block_width=" + block_width); } public void reduce (final IntWritable key, final Iterator<Text> values, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException { ArrayList<VectorElem<Integer>> self_vector = null; int [] out_vals = new int[block_width]; for(int i=0; i < block_width; i++) out_vals[i] = -1; while (values.hasNext()) { String cur_str = values.next().toString(); if( cur_str.charAt(1) == 's' ) { self_vector = GIMV.parseVectorVal(cur_str.substring(3), Integer.class); } ArrayList<VectorElem<Integer>> cur_vector = GIMV.parseVectorVal(cur_str.substring(3), Integer.class); Iterator<VectorElem<Integer>> vector_iter = cur_vector.iterator(); while( vector_iter.hasNext() ) { VectorElem<Integer> v_elem = vector_iter.next(); if( out_vals[ v_elem.row ] == -1 ) out_vals[ v_elem.row ] = v_elem.val; else if( out_vals[ v_elem.row ] > v_elem.val ) out_vals[ v_elem.row ] = v_elem.val; } } ArrayList<VectorElem<Integer>> new_vector = GIMV.makeIntVectors( out_vals, block_width ); int isDifferent = GIMV.compareVectors( self_vector, new_vector ); String out_prefix = "ms"; if( isDifferent == 1) out_prefix += "i"; // incomplete else out_prefix += "f"; // finished output.collect(key, GIMV.formatVectorElemOutput(out_prefix, new_vector) ); } } public static class CombinerStage2 extends MapReduceBase implements Reducer<IntWritable, Text, IntWritable, Text> { public void reduce (final IntWritable key, final Iterator<Text> values, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException { String out_val ="moi"; int cur_min_nodeid = -1; while (values.hasNext()) { Text cur_value_text = values.next(); String cur_ci_string = cur_value_text.toString(); int cur_nodeid = -1; try { cur_nodeid = Integer.parseInt( cur_ci_string.substring(3) ); } catch (Exception ex) { System.out.println("Exception! cur_ci_string=[" + cur_ci_string + "]"); } if( cur_ci_string.charAt(1) == 's' ) { // for calculating individual diameter output.collect(key, new Text(cur_value_text) ); continue; } if( cur_min_nodeid == -1 ) { cur_min_nodeid = cur_nodeid; } else { if( cur_nodeid < cur_min_nodeid ) cur_min_nodeid = cur_nodeid; } } if( cur_min_nodeid != -1 ) { out_val += Integer.toString(cur_min_nodeid); output.collect(key, new Text( out_val ) ); } } } ////////////////////////////////////////////////////////////////////// // STAGE 3: Calculate number of nodes whose component id changed/unchanged. 
// - Input: current component ids // - Output: number_of_changed_nodes ////////////////////////////////////////////////////////////////////// public static class MapStage3 extends MapReduceBase implements Mapper { // output : f n ( n : # of node whose component didn't change) // i m ( m : # of node whose component changed) public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { if (value.toString().startsWith("#")) return; final String[] line = value.toString().split("\t"); char change_prefix = line[1].charAt(2); output.collect(new Text(Character.toString(change_prefix)), new Text(Integer.toString(1)) ); } } public static class RedStage3 extends MapReduceBase implements Reducer { public void reduce (final Text key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int sum = 0; while (values.hasNext()) { final String line = values.next().toString(); int cur_value = Integer.parseInt(line); sum += cur_value; } output.collect(key, new Text(Integer.toString(sum)) ); } } ////////////////////////////////////////////////////////////////////// // STAGE 4: Unfold the block component id format to plain format, after the bitstrings converged. // This is a map-only stage. // - Input: the converged component ids // - Output: (node_id, "msu"component_id) ////////////////////////////////////////////////////////////////////// public static class MapStage4 extends MapReduceBase implements Mapper { int block_width; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); System.out.println("MapStage4: block_width = " + block_width); } // input sample : //1 msu0 1 1 1 public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); final String[] tokens = line[1].substring(3).split(" "); int i; int block_id = Integer.parseInt(line[0] ); for(i = 0; i < tokens.length; i+=2) { int elem_row = Integer.parseInt(tokens[i]); int component_id = Integer.parseInt(tokens[i+1]); output.collect( new IntWritable(block_width * block_id + elem_row), new Text("msf" + component_id) ); } } } ////////////////////////////////////////////////////////////////////// // STAGE 5 : Summarize connected component information // input : comcmpt_curbm (block format) // output : comcmpt_summaryout // min_node_id, number_of_nodes_in_the_component ////////////////////////////////////////////////////////////////////// public static class MapStage5 extends MapReduceBase implements Mapper { private final IntWritable out_key_int = new IntWritable(); private final IntWritable out_count_int = new IntWritable(1); int block_width; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); System.out.println("MapStage5 : configure is called. 
block_width=" + block_width); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); final String[] line = line_text.split("\t"); final String[] elems = line[1].substring(3).split(" "); for(int i = 0; i < elems.length; i += 2) { int cur_minnode = Integer.parseInt(elems[i+1]); out_key_int.set( cur_minnode ); output.collect( out_key_int, out_count_int ); } } } public static class RedStage5 extends MapReduceBase implements Reducer { public void reduce (final IntWritable key, final Iterator values, OutputCollector output, final Reporter reporter) throws IOException { int count = 0; while (values.hasNext()) { int cur_count = values.next().get(); count += cur_count; } IntWritable count_int = new IntWritable(count); output.collect(key, count_int ); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path edge_path = null; protected Path vector_path = null; protected Path curbm_path = null; protected Path tempbm_path = null; protected Path nextbm_path = null; protected Path output_path = null; protected Path curbm_unfold_path = null; protected Path summaryout_path = null; protected String local_output_path; protected int number_nodes = 0; protected int nreducers = 1; protected int cur_radius = 0; protected int block_width = 64; protected int recursive_diagmult = 0; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new ConCmptBlock(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("ConCmptBlock <# of nodes> <# of reducers> "); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. public int run (final String[] args) throws Exception { if( args.length != 9 ) { return printUsage(); } int i; edge_path = new Path(args[0]); curbm_path = new Path(args[1]); tempbm_path = new Path(args[2]); nextbm_path = new Path(args[3]); output_path = new Path(args[4]); curbm_unfold_path = new Path("concmpt_curbm"); summaryout_path = new Path("concmpt_summaryout"); number_nodes = Integer.parseInt(args[5]); nreducers = Integer.parseInt(args[6]); if( args[7].compareTo("fast") == 0 ) recursive_diagmult = 1; else recursive_diagmult = 0; block_width = Integer.parseInt(args[8]); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing connected component using block method. Reducers = " + nreducers + ", block_width = " + block_width); local_output_path = args[4] + "_temp"; // Iteratively calculate neighborhood function. for (i = cur_radius; i < MAX_ITERATIONS; i++) { cur_radius++; iter_counter++; JobClient.runJob(configStage1()); JobClient.runJob(configStage2()); JobClient.runJob(configStage3()); FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path)); final FileSystem fs = FileSystem.get(getConf()); // copy neighborhood information from HDFS to local disk, and read it! 
String new_path = local_output_path + "/" + i;
fs.copyToLocalFile(output_path, new Path(new_path));
ResultInfo ri = ConCmpt.readIterationOutput(new_path);
changed_nodes[iter_counter] = ri.changed;
unchanged_nodes[iter_counter] = ri.unchanged;
System.out.println("Hop " + i + " : changed = " + ri.changed + ", unchanged = " + ri.unchanged);

// Stop when no component id changed during this hop.
if( ri.changed == 0 ) {
    System.out.println("All the component ids converged. Finishing...");
    fs.delete(curbm_path);
    fs.delete(tempbm_path);
    fs.delete(output_path);
    fs.rename(nextbm_path, curbm_path);

    System.out.println("Unfolding the block structure for easy lookup...");
    JobClient.runJob(configStage4());
    break;
}

// rotate directory
fs.delete(curbm_path);
fs.delete(tempbm_path);
fs.delete(output_path);
fs.rename(nextbm_path, curbm_path);
}

FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path));

// calculate summary information using an additional pass
System.out.println("Summarizing connected components information...");
JobClient.runJob(configStage5());

// finishing.
System.out.println("\n[PEGASUS] Connected component computed.");
System.out.println("[PEGASUS] Total Iteration = " + iter_counter);
System.out.println("[PEGASUS] Connected component information is saved in the HDFS concmpt_curbm as\n\"node_id 'msf'component_id\" format");
System.out.println("[PEGASUS] Connected component distribution is saved in the HDFS concmpt_summaryout as\n\"component_id number_of_nodes\" format.\n");

return 0;
}

// Configure pass1
protected JobConf configStage1() throws Exception {
    final JobConf conf = new JobConf(getConf(), ConCmptBlock.class);
    conf.set("block_width", "" + block_width);
    conf.set("recursive_diagmult", "" + recursive_diagmult);
    conf.setJobName("ConCmptBlock_pass1");

    conf.setMapperClass(MapStage1.class);
    conf.setReducerClass(RedStage1.class);

    FileInputFormat.setInputPaths(conf, edge_path, curbm_path);
    FileOutputFormat.setOutputPath(conf, tempbm_path);

    conf.setNumReduceTasks( nreducers );

    conf.setOutputKeyClass(IntWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}

// Configure pass2
protected JobConf configStage2 () throws Exception {
    final JobConf conf = new JobConf(getConf(), ConCmptBlock.class);
    conf.set("block_width", "" + block_width);
    conf.setJobName("ConCmptBlock_pass2");

    conf.setMapperClass(MapStage2.class);
    conf.setReducerClass(RedStage2.class);

    FileInputFormat.setInputPaths(conf, tempbm_path);
    FileOutputFormat.setOutputPath(conf, nextbm_path);

    conf.setNumReduceTasks( nreducers );

    conf.setOutputKeyClass(IntWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}

// Configure pass3
protected JobConf configStage3 () throws Exception {
    final JobConf conf = new JobConf(getConf(), ConCmptBlock.class);
    conf.setJobName("ConCmptBlock_pass3");

    conf.setMapperClass(MapStage3.class);
    conf.setReducerClass(RedStage3.class);
    conf.setCombinerClass(RedStage3.class);

    FileInputFormat.setInputPaths(conf, nextbm_path);
    FileOutputFormat.setOutputPath(conf, output_path);

    conf.setNumReduceTasks( 1 );    // This is necessary to summarize and save data.
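    // Note (added): RedStage3 is reused as the combiner above because its
    // operation is a plain integer sum, which is associative and commutative;
    // partial counts can therefore be merged on the map side without changing
    // the result, cutting shuffle volume before the single reduce task.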
conf.setOutputKeyClass(Text.class); conf.setOutputValueClass(Text.class); return conf; } // Configure pass4 protected JobConf configStage4() throws Exception { final JobConf conf = new JobConf(getConf(), ConCmptBlock.class); conf.set("block_width", "" + block_width); conf.setJobName("ConCmptBlock_pass4"); conf.setMapperClass(MapStage4.class); FileInputFormat.setInputPaths(conf, curbm_path); FileOutputFormat.setOutputPath(conf, curbm_unfold_path); conf.setNumReduceTasks( 0 ); //This is essential for map-only tasks. conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure pass5 protected JobConf configStage5() throws Exception { final JobConf conf = new JobConf(getConf(), ConCmptBlock.class); conf.set("block_width", "" + block_width); conf.setJobName("ConCmptBlock_pass5"); conf.setMapperClass(MapStage5.class); conf.setReducerClass(RedStage5.class); conf.setCombinerClass(RedStage5.class); FileInputFormat.setInputPaths(conf, curbm_path); FileOutputFormat.setOutputPath(conf, summaryout_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(IntWritable.class); return conf; } } PEGASUS/src/pegasus/pegasus/0000755000000000000000000000000011443145611014570 5ustar rootrootPEGASUS/src/pegasus/pegasus/GIMV.java0000644000000000000000000004511411443145611016202 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: GIMV.java - A main class for Generalized Iterative Matrix-Vector multiplication. Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; class VectorElem { public short row; public T val; public VectorElem(short in_row, T in_val) { row = in_row; val = in_val; } public double getDouble() { return ((Double)val).doubleValue(); } }; class BlockElem { public short row; public short col; public T val; public BlockElem(short in_row, short in_col, T in_val) { row = in_row; col = in_col; val = in_val; } }; enum EdgeType { Real, Binary }; public class GIMV { // convert strVal to array of VectorElem. // strVal is msu(ROW-ID VALUE)s. 
ex) 0 0.5 1 0.3 // oc public static ArrayList> parseVectorVal(String strVal, Class type) { ArrayList arr = new ArrayList>(); final String[] tokens = strVal.split(" "); int i; for(i = 0; i < tokens.length; i += 2) { short row = Short.parseShort(tokens[i]); if( type.getSimpleName().equals("Integer") ) { int val = Integer.parseInt(tokens[i+1]); arr.add( new VectorElem(row, val) ); } else if( type.getSimpleName().equals("Double") ) { double val = Double.parseDouble(tokens[i+1]); arr.add( new VectorElem(row, val) ); } } return arr; } // parse HADI vector public static ArrayList> parseHADIVector(String strVal) { ArrayList arr = new ArrayList>(); final String[] tokens = strVal.substring(1).split(" "); for( int i = 0; i < tokens.length; i += 2 ) { short row = Short.parseShort(tokens[i]); String bitstring; bitstring = tokens[i+1]; arr.add( new VectorElem(row, bitstring) ); } // the maximum length of arr is block_width. return arr; } // compute the dot product of two vector blocks. // strVal is msu(ROW-ID VALUE)s. ex) 0 0.5 1 0.3 // oc public static ArrayList> multDiagVector(String strDiag, String strVec, int block_width) { short i; ArrayList arr = new ArrayList>(); if( strDiag.length() == 0 ) return arr; double []dVal1 = new double[block_width]; double []dVal2 = new double[block_width]; String[] tokens1 = strDiag.split(" "); String[] tokens2 = strVec.split(" "); for(i = 0; i < block_width; i++) { dVal1[i] = 0; dVal2[i] = 0; } for(i = 0; i < tokens1.length; i += 2) { short row = Short.parseShort(tokens1[i]); double val = Double.parseDouble(tokens1[i+1]); dVal1[row] = 1.0 / val; } for(i = 0; i < tokens2.length; i += 2) { short row = Short.parseShort(tokens2[i]); double val = Double.parseDouble(tokens2[i+1]); dVal2[row] = val; } for(i = 0; i < block_width; i++) { if( dVal1[i] != 0 && dVal2[i] != 0 ) arr.add( new VectorElem(i, (dVal1[i]*dVal2[i])) ); } return arr; } // return value : true (if every VectorElem starts with 'f') // false (otherwise) public static boolean IsCompleteHADIVector(ArrayList> arr) { Iterator> vector_iter = arr.iterator(); while( vector_iter.hasNext() ) { VectorElem cur_ve = vector_iter.next(); if( cur_ve.val.charAt(0) != 'f' ) return false; } return true; } public static ArrayList> minBlockVector(ArrayList> block, ArrayList> vector, int block_width, int isFastMethod) { int[] out_vals = new int[block_width]; // buffer to save output short i; for(i=0; i < block_width; i++) out_vals[i] = -1; Iterator> vector_iter; Iterator> block_iter; Map vector_map = new HashMap(); // initialize out_vals if( isFastMethod == 1 ) { vector_iter = vector.iterator(); while(vector_iter.hasNext() ) { VectorElem v_elem = vector_iter.next(); out_vals[ v_elem.row ] = v_elem.val; } } vector_iter = vector.iterator(); block_iter = block.iterator(); BlockElem saved_b_elem = null; while( vector_iter.hasNext() ) { VectorElem v_elem = vector_iter.next(); vector_map.put(v_elem.row, v_elem.val); } BlockElem b_elem; while(block_iter.hasNext() || saved_b_elem != null) { b_elem = block_iter.next(); Integer vector_val = vector_map.get (b_elem.col); if( vector_val != null) { int vector_val_int = vector_val.intValue(); if( out_vals[ b_elem.row ] == -1 ) out_vals[ b_elem.row ] = vector_val_int; else if( out_vals[ b_elem.row ] > vector_val_int ) out_vals[ b_elem.row ] = vector_val_int; } } ArrayList> result_vector = null; for(i = 0; i < block_width; i++) { if( out_vals[i] != -1 ) { if( result_vector == null ) result_vector = new ArrayList>(); result_vector.add( new VectorElem(i, out_vals[i]) ); } } return result_vector; } 
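// ------------------------------------------------------------------------
// Illustrative example (added note, assuming block_width = 2): how
// minBlockVector() combines one matrix block with one vector block in the
// connected-components GIM-V. Each block element (row,col) proposes
// vector[col] as a candidate component id for `row`, and the minimum
// proposal wins:
//
//   block  : (row=0,col=1), (row=1,col=0)    // the undirected edge 0-1
//   vector : (row=0,val=4), (row=1,val=7)    // current component ids
//
//   isFastMethod == 0  ->  (0,7), (1,4)  // neighbor proposals only; the min
//                                        // against the node's own id is
//                                        // taken later, in the caller's
//                                        // second stage (e.g. RedStage2)
//   isFastMethod == 1  ->  (0,4), (1,4)  // out_vals is pre-seeded with the
//                                        // vector itself, so the min against
//                                        // the node's own id is folded in
// ------------------------------------------------------------------------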
// Perform the BIT-OR() operation on one block and one vector. // return value : the result vector public static ArrayList> bworBlockVector(ArrayList> block, ArrayList> vector, int block_width, int nreplication, int encode_bitmask) { long[][] out_vals = new long[block_width][nreplication]; // buffer to save output short i; int j; for(i=0; i < block_width; i++) for(j=0; j < nreplication; j++) out_vals[i][j] = 0; Iterator> vector_iter; Iterator> block_iter; Map vector_map = new HashMap(); vector_iter = vector.iterator(); block_iter = block.iterator(); BlockElem saved_b_elem = null; while( vector_iter.hasNext() ) { VectorElem v_elem = vector_iter.next(); vector_map.put(v_elem.row, v_elem.val); } BlockElem b_elem; while(block_iter.hasNext() || saved_b_elem != null) { b_elem = block_iter.next(); String vector_str = vector_map.get (b_elem.col); if( vector_str != null ) { if( encode_bitmask == 1 ) { int startpos_bm = vector_str.indexOf('~'); int [] cur_mask = BitShuffleCoder.decode_bitmasks( vector_str.substring(startpos_bm + 1), nreplication); for(j = 0; j < nreplication; j++) out_vals[b_elem.row][j] = (out_vals[b_elem.row][j] | cur_mask[j]); } else { String[] tokens = vector_str.split("~"); for(j = 0; j < nreplication; j++) { long cur_mask = Long.parseLong( tokens[j+1], 16 ); out_vals[b_elem.row][j] = (out_vals[b_elem.row][j] | cur_mask); } } } } ArrayList> result_vector = new ArrayList>(); int nonzero_count = 0; for(i = 0; i < block_width; i++) { String out_str = "i"; for(j = 0; j < nreplication; j++) { if( out_vals[i][j] != 0 ) break; } if( j == nreplication ) continue; if( encode_bitmask == 1 ) { out_str += ( "~" + BitShuffleCoder.encode_bitmasks( out_vals[i], nreplication ) ); } else { for(j = 0; j < nreplication; j++) out_str = out_str + "~" + Long.toHexString(out_vals[i][j]) ; } result_vector.add( new VectorElem((short)i, out_str) ); } return result_vector; } // multiply one block and one vector // return : result vector public static ArrayList> multBlockVector(ArrayList> block, ArrayList> vector, int i_block_width) { double[] out_vals = new double[i_block_width]; // buffer to save output short i; for(i=0; i < i_block_width; i++) out_vals[i] = 0; Iterator> vector_iter = vector.iterator(); Iterator> block_iter = block.iterator(); BlockElem saved_b_elem = null; while( vector_iter.hasNext() ) { VectorElem v_elem = vector_iter.next(); BlockElem b_elem; while(block_iter.hasNext() || saved_b_elem != null) { if( saved_b_elem != null ) { b_elem = saved_b_elem; saved_b_elem = null; } else b_elem = block_iter.next(); // compare v_elem.row and b_elem.col if( b_elem.col < v_elem.row ) continue; else if( b_elem.col == v_elem.row ) { out_vals[ b_elem.row ] += b_elem.val * v_elem.val; } else { // b_elem.col > v_elem.row saved_b_elem = b_elem; break; } } } ArrayList> result_vector = null; for(i = 0; i < i_block_width; i++) { if( out_vals[i] != 0 ) { if( result_vector == null ) result_vector = new ArrayList>(); result_vector.add( new VectorElem(i, out_vals[i]) ); } } return result_vector; } // multiply one block and one vector, when the block is in the bit encoded format. 
// return : result vector public static ArrayList> multBlockVector(byte[] block, ArrayList> vector, int i_block_width) { double[] out_vals = new double[i_block_width]; // buffer to save output short i; for(i=0; i < i_block_width; i++) out_vals[i] = 0; Iterator> vector_iter = vector.iterator(); while( vector_iter.hasNext() ) { VectorElem v_elem = vector_iter.next(); int col = v_elem.row; for(int row = 0; row < i_block_width; row++) { int edge_elem = block[ (row*i_block_width + col)/8 ] & ( 1 << (col % 8) ); if( edge_elem > 0 ) { out_vals[ row ] += v_elem.val; } } } ArrayList> result_vector = null; for(i = 0; i < i_block_width; i++) { if( out_vals[i] != 0 ) { if( result_vector == null ) result_vector = new ArrayList>(); result_vector.add( new VectorElem(i, out_vals[i]) ); } } return result_vector; } // convert strVal to array of BlockElem. // strVal is (COL-ID ROW-ID VALUE)s. ex) 0 0 1 1 0 1 1 1 1 // note the strVal is tranposed. So we should tranpose it to (ROW-ID COL-ID ...) format. public static ArrayList> parseBlockVal(String strVal, Class type) { ArrayList arr = new ArrayList>(); final String[] tokens = strVal.split(" "); int i; if( type.getSimpleName().equals("Double") ) { for(i = 0; i < tokens.length; i += 3) { short row = Short.parseShort(tokens[i+1]); short col = Short.parseShort(tokens[i]); double val = Double.parseDouble(tokens[i+2]); BlockElem be = new BlockElem(row, col, val); arr.add( be ); } } else if ( type.getSimpleName().equals("Integer") ) { for(i = 0; i < tokens.length; i += 2) { short row = Short.parseShort(tokens[i+1]); short col = Short.parseShort(tokens[i]); BlockElem be = new BlockElem(row, col, 1); arr.add( be ); } } return arr; } // make Text format output by combining the prefix and vector elements. public static Text formatVectorElemOutput( String prefix, ArrayList> vector) { String cur_block_output = prefix; int isFirst = 1; if( vector != null && vector.size() > 0 ) { Iterator> cur_mult_result_iter = vector.iterator(); while( cur_mult_result_iter.hasNext() ) { VectorElem elem = cur_mult_result_iter.next(); if( cur_block_output != "" && isFirst == 0) cur_block_output += " "; cur_block_output += ("" + elem.row + " " + elem.val); isFirst = 0; } return new Text( cur_block_output ); } return new Text(""); } // make Text format HADI output by combining the prefix and vector elements. public static Text formatHADIVectorElemOutput( String prefix, ArrayList> vector ) { String cur_block_output = prefix; int isFirst = 1; if( vector != null && vector.size() > 0 ) { Iterator> cur_mult_result_iter = vector.iterator(); while( cur_mult_result_iter.hasNext() ) { VectorElem elem = cur_mult_result_iter.next(); if( cur_block_output != "" && isFirst == 0) cur_block_output += " "; if( elem.val.charAt(0) == 'i' ) cur_block_output += ("" + elem.row + " " + elem.val); else cur_block_output += ( "" + elem.row + " " + "f" + elem.val.substring(1) ); isFirst = 0; } return new Text( cur_block_output ); } return new Text(""); } // compare two vectors. // return value : 0 (same) // 1 (different) public static int compareVectors( ArrayList> v1, ArrayList> v2 ) { if( v1.size() != v2.size() ) return 1; Iterator> v1_iter = v1.iterator(); Iterator> v2_iter = v2.iterator(); while( v1_iter.hasNext() ) { VectorElem elem1 = v1_iter.next(); VectorElem elem2 = v2_iter.next(); if( elem1.row != elem2.row || ((Comparable)(elem1.val)).compareTo(elem2.val) != 0 ) return 1; } return 0; } // print the content of the input vector. 
public static int printVector( ArrayList> vector ) { Iterator> v_iter = vector.iterator(); System.out.print("vector : "); while( v_iter.hasNext() ) { VectorElem elem = v_iter.next(); System.out.print(" v[" + elem.row + "] = " + elem.val ); } System.out.println(""); return 0; } // make an integer vector public static ArrayList> makeIntVectors( int[] int_vals, int block_width ) { int i; ArrayList> result_vector = new ArrayList>(); for(i = 0; i < block_width; i++) { if( int_vals[i] != -1 ) { result_vector.add( new VectorElem((short)i, int_vals[i]) ); } } return result_vector; } // parse a hadi bitstring( k replication ) public static long[] parseHADIBitString( String in_str, int nreplication, int encode_bitmask ) { long[] cur_bm = new long[nreplication]; if( encode_bitmask == 1 ) { int tilde_pos = in_str.indexOf('~'); int [] cur_mask = BitShuffleCoder.decode_bitmasks( in_str.substring(tilde_pos+1), nreplication); for(int i = 0; i < nreplication; i++) { cur_bm[i] = cur_mask[i]; } } else { String[] tokens = in_str.split("~"); for(int i = 0; i < nreplication; i++) { cur_bm[i] = Long.parseLong( tokens[i+1], 16 ); } } return cur_bm; } // update HADI bitstring public static long[] updateHADIBitString( long [] cur_bm, String in_str, int nreplication, int encode_bitmask ) { if(encode_bitmask == 1) { int tilde_pos = in_str.indexOf('~'); int [] cur_mask = BitShuffleCoder.decode_bitmasks( in_str.substring(tilde_pos+1), nreplication); for(int i = 0; i < nreplication; i++) { cur_bm[i] = (cur_bm[i] | cur_mask[i]); } } else { String[] tokens = in_str.split("~"); for(int i = 0; i < nreplication; i++) { long cur_mask = Long.parseLong( tokens[i+1], 16 ); cur_bm[i] = (cur_bm[i] | cur_mask); } } return cur_bm; } // compare two bitstrings. // return value : 1 (if the two are different), 0(if they are same) public static int isDifferent( long[] left_bm, long[] right_bm, int nreplication ) { for(int i = 0; i< nreplication; i++) if( left_bm[i] != right_bm[i] ) return 1; return 0; } // Make a block vector using out_vals[][]. // The parameter self_bm[][] is used to set the first byte of second component of elements. // return value: ArrayList containing elements of the block vector. public static ArrayList> makeHADIBitString( long[][] out_vals, int block_width, long [][] self_bm, char []prefix, /*short []radius,*/ String[] saved_rad_nh, int nreplication, int cur_radius, int encode_bitmask) { int i, j; ArrayList> result_vector = new ArrayList>(); for(i = 0; i < block_width; i++) { String out_str = ""; int diff = isDifferent(out_vals[i], self_bm[i], nreplication ); if( diff == 1 ) { // changed if( saved_rad_nh[i] != null && saved_rad_nh[i].length() >= 1 ) { int colonPos = saved_rad_nh[i].indexOf(':'); out_str += ("i" + (cur_radius-1) + HadiUtils.update_radhistory(self_bm[i], saved_rad_nh[i].substring(colonPos+1), cur_radius, nreplication) );//out_str = "i"; } else out_str += ("i" + (cur_radius-1)); } else { // unchanged => completed. 
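// Note (added): HADI tags each row's bitstring with a state prefix:
//   'i' = incomplete - the bitmask still changed at this radius
//   'c' = completed  - unchanged for the first time, at radius cur_radius-1
//   'f' = finalized  - already complete at some earlier radius
// This branch promotes an unchanged row from 'i' to 'c', or keeps it final
// ('c'/'f' -> 'f') together with its saved radius/neighborhood history.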
if( prefix[i] == 'i' ) { // incomplete out_str += ("c" + (cur_radius-1)) ; if( saved_rad_nh[i] != null ) { int colonPos = saved_rad_nh[i].indexOf(':'); if( colonPos >= 0 ) out_str += saved_rad_nh[i].substring(colonPos); } } else // complete_prefix == 'c' or 'f' out_str += "f" + saved_rad_nh[i]; // "f" + saved_radius } if( encode_bitmask == 1 ) { out_str += ( "~" + BitShuffleCoder.encode_bitmasks( out_vals[i], nreplication ) ); } else { for(j = 0; j < nreplication; j++) out_str = out_str + "~" + Long.toHexString(out_vals[i][j]) ; } result_vector.add( new VectorElem((short)i, out_str) ); } return result_vector; } // Make a block vector using out_vals[][]. // The parameter self_bm[][] is used to set the first byte of second component of elements. // return value: ArrayList containing elements of the block vector. public static ArrayList> makeHADICombinerBitString( long[][] out_vals, int block_width, int nreplication, int cur_radius, int encode_bitmask) { int i, j; ArrayList> result_vector = new ArrayList>(); for(i = 0; i < block_width; i++) { String out_str = "i0:0:1"; for(j = 0; j < nreplication; j++) { if( out_vals[i][j] != 0 ) break; } if( j == nreplication ) continue; if( encode_bitmask == 1 ) { out_str += ( "~" + BitShuffleCoder.encode_bitmasks( out_vals[i], nreplication ) ); } else { for(j = 0; j < nreplication; j++) out_str = out_str + "~" + Long.toHexString(out_vals[i][j]) ; } result_vector.add( new VectorElem((short)i, out_str) ); } return result_vector; } }; PEGASUS/src/pegasus/pegasus/PegasusUtils.java0000644000000000000000000004303211443145611020065 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
------------------------------------------------------------------------- File: PegasusUtils.java - Common utility classes and functions Version: 2.0 ***********************************************************************/ package pegasus; import pegasus.matvec.*; import java.io.*; import java.util.*; import java.util.zip.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; import sun.misc.*; // common utility functions public class PegasusUtils { public static BufferedWriter open_log_file(String job_name_base) throws Exception { FileWriter fstream = new FileWriter(job_name_base + ".log"); BufferedWriter out = new BufferedWriter(fstream); return out; } public static String get_cur_datetime() { String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss"; Calendar cal = Calendar.getInstance(); SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); return sdf.format(cal.getTime()); } public static String format_duration(long millis) { String DATE_FORMAT_NOW = "HH:mm:ss"; SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); sdf.setTimeZone(TimeZone.getTimeZone("GMT")); return sdf.format(new Date(millis)); } public static int min2( int a, int b ) { if( a < b ) return a; return b; } public static int max2( int a, int b ) { if( a > b ) return a; return b; } ///////////////////////////////////////////////////////////////////////// // High Level Functions // public static void MatvecNaive(Configuration conf, int nreducer, String mat_path, String vec_path, String out_path, int transpose, int ignore_weights) throws Exception { System.out.println("Running Matvecnaive: mat_path=" + mat_path + ", vec_path=" + vec_path); String [] args = new String[8]; args[0] = new String( "" + mat_path); args[1] = new String( "temp_mv"); args[2] = new String(out_path); args[3] = new String( "" + nreducer ); args[4] = "nosym"; args[5] = new String( vec_path); args[6] = new String( "" + transpose); args[7] = new String( "" + ignore_weights); ToolRunner.run(conf, new MatvecNaive(), args); System.out.println("Done Matvecnaive. 
Output is saved in HDFS " + out_path); return; } ///////////////////////////////////////////////////////////////////////// // Mappers and Reducers // // Identity Mapper public static class MapIdentity extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); if( tabpos > 0 ) { int out_key = Integer.parseInt(line_text.substring(0, tabpos)); output.collect( new IntWritable(out_key) , new Text(line_text.substring(tabpos+1)) ); } else { output.collect( new IntWritable(Integer.parseInt(line_text)) , new Text("") ); } } } public static class MapIdentityLongText extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); if( tabpos > 0 ) { long out_key = Long.parseLong(line_text.substring(0, tabpos)); output.collect( new LongWritable(out_key) , new Text(line_text.substring(tabpos+1)) ); } else { output.collect( new LongWritable(Long.parseLong(line_text)) , new Text("") ); } } } // Identity Mapper public static class MapIdentityDouble extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); int out_key = Integer.parseInt(line_text.substring(0, tabpos)); output.collect( new IntWritable(out_key) , new DoubleWritable( Double.parseDouble(line_text.substring(tabpos+1)) ) ); } } // Histogram Mapper public static class MapHistogram extends MapReduceBase implements Mapper { long number_nodes = 0; int nreducers = 0; public void configure(JobConf job) { number_nodes = Long.parseLong(job.get("number_nodes")); //nreducers = Integer.parseInt(job.get("nreducers")); nreducers = job.getNumReduceTasks(); System.out.println("MapHistogram configure(): number_nodes = " + number_nodes + ", nreducers=" + nreducers); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); long first_column_key = 0; if( tabpos > 0 ) { String long_str= line_text.substring(0, tabpos); if(long_str.length() > 18 ) return; first_column_key = Long.parseLong( long_str ); } else { if(line_text.length() > 18 ) return; first_column_key = Long.parseLong(line_text); } int out_key = (int)(first_column_key % nreducers);//int out_key = (int)((first_column_key/(double)number_nodes) * nreducers) ; //System.out.println("first_column_key = " + first_column_key + ", out_key=" + out_key); output.collect( new IntWritable(out_key) , new Text("") ); } } // Histogram Mapper public static class MapHistogramText extends MapReduceBase implements Mapper { long number_nodes = 0; int nreducers = 0; public void configure(JobConf job) { number_nodes = Long.parseLong(job.get("number_nodes")); //nreducers = Integer.parseInt(job.get("nreducers")); nreducers = job.getNumReduceTasks(); System.out.println("MapHistogram configure(): number_nodes = " + number_nodes + ", nreducers=" + nreducers); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = 
value.toString(); int tabpos = line_text.indexOf("\t"); long first_column_key = 0; if( tabpos > 0 ) { String long_str= line_text.substring(0, tabpos); first_column_key = Math.abs(long_str.hashCode()); } else { first_column_key = Math.abs(line_text.hashCode()); } int out_key = (int)(first_column_key % nreducers);//int out_key = (int)((first_column_key/(double)number_nodes) * nreducers) ; output.collect( new IntWritable(out_key) , new Text("") ); } } // Swap public static class MapSwapDouble extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); int tabpos = line_text.indexOf("\t"); int out_val = Integer.parseInt(line_text.substring(0, tabpos)); output.collect( new DoubleWritable( Double.parseDouble(line_text.substring(tabpos+1)) ), new IntWritable(out_val) ); } } public static class RedIdentity extends MapReduceBase implements Reducer { public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { while (values.hasNext()) { String cur_val = values.next().toString(); output.collect( key, new Text( cur_val ) ); } } } public static class RedIdentityGen extends MapReduceBase implements Reducer { public void reduce (final K1 key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { while (values.hasNext()) { V1 cur_val = values.next(); output.collect( key, cur_val ); } } } // Histogram Reducer public static class RedHistogram extends MapReduceBase implements Reducer { int partition_no = -1; public void configure(JobConf job) { partition_no = job.getInt("mapred.task.partition", 0) ; } public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int count = 0; while (values.hasNext()) { values.next(); count++; } output.collect( key, new IntWritable( count ) ); } } // Sum Reducer (type: double) public static class RedSumDouble extends MapReduceBase implements Reducer { public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { double sum = 0; while (values.hasNext()) { double cur_val = values.next().get(); sum += cur_val; } output.collect( key, new DoubleWritable( sum ) ); } } // Sum Reducer (key: long, value: 'v' + val) public static class RedSumLongText extends MapReduceBase implements Reducer { public void reduce (final LongWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { long sum = 0; while (values.hasNext()) { String str_val = values.next().toString(); long cur_val = Long.parseLong(str_val.substring(1)); sum += cur_val; } output.collect( key, new Text( "v" + sum ) ); } } // Sum Reducer (type: double) public static class RedSumDoubleTextKey extends MapReduceBase implements Reducer { public void reduce (final Text key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { double sum = 0; while (values.hasNext()) { double cur_val = values.next().get(); sum += cur_val; } output.collect( key, new DoubleWritable( sum ) ); } } public static class RedSumDoubleLongKey extends MapReduceBase implements Reducer { public void reduce (final LongWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { 
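// Note (added): same accumulation pattern as RedSumDouble above, but keyed
// by LongWritable; all partial double values for one key are summed.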
double sum = 0;

while (values.hasNext()) {
    double cur_val = values.next().get();
    sum += cur_val;
}

output.collect( key, new DoubleWritable( sum ) );
}
}

public static class RedAvgDouble extends MapReduceBase implements Reducer
{
    public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException
    {
        double sum = 0;
        int count = 0;

        while (values.hasNext()) {
            double cur_val = values.next().get();
            sum += cur_val;
            count++;
        }

        output.collect( key, new DoubleWritable( sum/count ) );
    }
}

public static void copyToLocalFile(Configuration conf, Path hdfs_path, Path local_path) throws Exception
{
    FileSystem fs = FileSystem.get(conf);

    // copy the result from HDFS to the local file system
    fs.copyToLocalFile(hdfs_path, local_path);
}

// read neighborhood number after each iteration.
public static double readLocaldirOneline(String new_path) throws Exception
{
    String output_path = new_path + "/part-00000";
    String str = "";
    try {
        BufferedReader in = new BufferedReader( new InputStreamReader(new FileInputStream( output_path ), "UTF8"));
        str = in.readLine();
        in.close();
    } catch (UnsupportedEncodingException e) {
    } catch (IOException e) {
    }

    if( str != null ) {
        final String[] line = str.split("\t");
        return Double.parseDouble(line[1]);
    } else
        return 0;
}

// read neighborhood number after each iteration.
public static double readLocaldirOneline(String new_path, int partno) throws Exception
{
    String output_path = new_path;
    if( partno >= 0 && partno < 10 )
        output_path += "/part-0000" + partno;
    else if( partno >= 10 && partno < 100 )
        output_path += "/part-000" + partno;
    else
        output_path += "/part-00" + partno;

    String str = "";
    try {
        BufferedReader in = new BufferedReader( new InputStreamReader(new FileInputStream( output_path ), "UTF8"));
        str = in.readLine();
        in.close();
    } catch (UnsupportedEncodingException e) {
        return 0;
    } catch (IOException e) {
        return 0;
    }

    if( str != null ) {
        final String[] line = str.split("\t");
        return Double.parseDouble(line[1]);
    } else
        return 0;
}

// used by RWR
public static String loadQueryNodeInfo(String input_file) throws Exception
{
    String cur_line;
    int query_count = 0;
    ArrayList<Long> query_nodes = new ArrayList<Long>();
    ArrayList<Double> query_weights = new ArrayList<Double>();
    double sum_weights = 0;
    String query_str = "";

    try {
        BufferedReader in = new BufferedReader( new InputStreamReader(new FileInputStream( input_file ), "UTF8"));
        while( (cur_line = in.readLine()) != null ) {
            if( cur_line.length() > 0 ) {
                String []tokens = cur_line.split("\t");
                query_nodes.add( Long.parseLong(tokens[0]) );
                query_weights.add( Double.parseDouble(tokens[1]) );
                sum_weights += query_weights.get(query_count);
                query_count++;
            }
        }
        in.close();
    } catch (UnsupportedEncodingException e) {
    } catch (IOException e) {
    }

    System.out.println("loadQueryNodeInfo: total " + query_count + " queries read.");

    // normalize the weights so that they sum to 1
    for(int i = 0; i < query_count; i++) {
        query_str += ("" + query_nodes.get(i) + " " + (query_weights.get(i) / sum_weights));
        if( i != query_count - 1 )
            query_str += " ";
    }

    return query_str;
}

/////////////////////////////////////////////////////////////////////////
// Linear Algebra related operations
//

// read L1 norm result
public static double read_l1norm_result(Configuration conf) throws Exception
{
    Path l1norm_output = new Path("l1norm_output");
    FileSystem lfs = FileSystem.getLocal(conf);

    // read the result
    String local_output_path = "l1norm";
    lfs.delete(new Path("l1norm/"), true);

    FileSystem fs = FileSystem.get(conf);
    fs.copyToLocalFile(l1norm_output, new Path(local_output_path));

    double result =
PegasusUtils.readLocaldirOneline(local_output_path); lfs.delete(new Path("l1norm/"), true);//FileUtil.fullyDelete( fs.getLocal(conf), new Path(local_output_path)); //FileUtil.fullyDelete( FileSystem.getLocal(conf), new Path("lanczos")); return result; } ///////////////////////////////////////////////////////////////////////// // Partitioners // public static class RangePartition implements Partitioner { int number_nodes; public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); System.out.println("RangePartition configure(): number_nodes = " + number_nodes); } // range partitioner public int getPartition(IntWritable key, V2 value, int numReduceTasks) { return (int)(( ((int)key.get())/(double)number_nodes) * numReduceTasks); } } public static class RangePartitionS1 implements Partitioner { int number_nodes; public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); System.out.println("RangePartition configure(): number_nodes = " + number_nodes); } // range partitioner public int getPartition(IntWritable key, Text value, int numReduceTasks) { int result = (int)((key.get()/(double)number_nodes) * numReduceTasks); if( result == numReduceTasks ) return (numReduceTasks-1); else return result; } } public static class IdentityPartition implements Partitioner { public void configure(JobConf job) { } // range partitioner public int getPartition(IntWritable key, V2 value, int numReduceTasks) { int cand_partition = key.get(); if( cand_partition >= numReduceTasks ) return numReduceTasks-1; return cand_partition; } } } PEGASUS/src/pegasus/pagerank/0000755000000000000000000000000011443145611014711 5ustar rootrootPEGASUS/src/pegasus/pagerank/PagerankInitVector.java0000644000000000000000000001466111443145611021323 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: PageRankInitVector.java - Generate the initial vector for the PageRank. 
Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class PagerankInitVector extends Configured implements Tool { ////////////////////////////////////////////////////////////////////// // STAGE 1: make initial pagerank vector ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); output.collect( new IntWritable(Integer.parseInt(line[0])), new Text(line[1] + "\t" + line[2]) ); } } public static class RedStage1 extends MapReduceBase implements Reducer { int number_nodes = 1; double initial_weight = 0.0f; String str_weight; private final IntWritable from_node_int = new IntWritable(); public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); initial_weight = (double)1.0 / (double)number_nodes; str_weight = new String("" + initial_weight ); System.out.println("MapStage1: number_nodes = " + number_nodes); } public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int i; while (values.hasNext()) { String cur_value_str = values.next().toString(); final String[] line = cur_value_str.split("\t"); int start_node = Integer.parseInt(line[0]); int end_node = Integer.parseInt(line[1]); for(i = start_node; i <= end_node; i++) { from_node_int.set( i ); output.collect(from_node_int, new Text("v" + initial_weight)); } } } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path output_path = null; protected Path initial_prinput_path = new Path("pr_input"); protected int number_nodes = 0; protected int nreducers = 1; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new PagerankInitVector(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("PagerankInitVector <# of nodes> <# of reducers>"); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. 
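// Note (illustrative, added): per the argument parsing in run() below, the
// expected arguments are <output_path> <# of nodes> <# of reducers>, e.g.
// (hypothetical jar and paths)
//   hadoop jar pegasus.jar pegasus.PagerankInitVector pr_input_vector 1000000 16
// The job writes one "node_id<TAB>v<1/N>" line per node, so every node
// starts with the uniform PageRank 1/number_nodes.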
public int run (final String[] args) throws Exception { if( args.length != 3 ) { System.out.println("args.length = " + args.length); int i; for(i=0; i < args.length; i++) { System.out.println("args[" + i + "] = " + args[i] ); } return printUsage(); } output_path = new Path(args[0]); number_nodes = Integer.parseInt(args[1]); nreducers = Integer.parseInt(args[2]); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Generating initial PageRank vector for " + number_nodes + " nodes.\n"); // create PageRank generation command file, and copy to curpr_path gen_initial_pagerank_file(number_nodes, nreducers, initial_prinput_path); JobClient.runJob(configStage1()); System.out.println("\n[PEGASUS] Initial vector for PageRank generated in HDFS " + args[0] + "\n"); return 0; } // create PageRank init vector generation command public void gen_initial_pagerank_file(int number_nodes, int nmachines, Path initial_input_path) throws IOException { int gap = number_nodes / nmachines; int i; int start_node, end_node; String file_name = "pagerank.initial_rank.temp"; FileWriter file = new FileWriter(file_name); BufferedWriter out = new BufferedWriter (file); out.write("# number of nodes in graph = " + number_nodes+"\n"); System.out.println("creating initial pagerank (total nodes = " + number_nodes + ")"); for(i=0; i < nmachines; i++) { start_node = i * gap; if( i < nmachines - 1 ) end_node = (i+1)*gap - 1; else end_node = number_nodes - 1; out.write("" + i + "\t" + start_node + "\t" + end_node + "\n" ); } out.close(); // copy it to initial_input_path, and delete the temporary local file. final FileSystem fs = FileSystem.get(getConf()); fs.copyFromLocalFile( true, new Path("./" + file_name), new Path (initial_input_path.toString()+ "/" + file_name) ); } // Configure pass1 protected JobConf configStage1() throws Exception { final JobConf conf = new JobConf(getConf(), PagerankInitVector.class); conf.set("number_nodes", "" + number_nodes); conf.setJobName("PagerankInitVector_Stage1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); FileInputFormat.setInputPaths(conf, initial_prinput_path); FileOutputFormat.setOutputPath(conf, output_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setMapOutputValueClass(Text.class); conf.setOutputValueClass(Text.class); return conf; } } PEGASUS/src/pegasus/pagerank/PagerankPrep.java0000644000000000000000000001312311443145611020133 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: PageRankPrep.java - Convert the original edge file into column-normalized adjacency matrix format. 
Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class PagerankPrep extends Configured implements Tool { ////////////////////////////////////////////////////////////////////// // STAGE 1: Convert the original edge file into column-normalized adjacency matrix format. // - Input: edge file // - Output: column-normalized adjacency matrix ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { int make_symmetric = 0; public void configure(JobConf job) { make_symmetric = Integer.parseInt(job.get("make_symmetric")); System.out.println("MapStage1 : make_symmetric = " + make_symmetric); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); if(line.length < 2 ) // ignore ill-formated data. return; int src_id = Integer.parseInt(line[0]); int dst_id = Integer.parseInt(line[1]); output.collect( new IntWritable( src_id ), new Text(line[1]) ); if( make_symmetric == 1 ) output.collect( new IntWritable( dst_id ), new Text(line[0]) ); } } public static class RedStage1 extends MapReduceBase implements Reducer { public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int i; ArrayList dst_nodes_list = new ArrayList(); while (values.hasNext()) { String cur_value_str = values.next().toString(); dst_nodes_list.add( Integer.parseInt(cur_value_str) ); } int deg = dst_nodes_list.size(); double elem_value = 1.0 / (double)deg ; for( i = 0; i < deg; i++) { output.collect( new IntWritable( dst_nodes_list.get(i) ), new Text( Integer.toString(key.get()) + "\t"+ elem_value )); } } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path output_path = null; protected Path edge_path = null; protected int nreducers = 1; protected int make_symmetric = 0; // convert directed graph to undirected graph // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new PagerankPrep(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("PagerankPrep <# of reducers> "); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. 
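// Note (illustrative, added): per the argument parsing in run() below, the
// expected arguments are <edge_path> <output_path> <# of reducers> <makesym
// or anything else for a directed graph>. For example, with edges "0\t1" and
// "0\t2", node 0 has out-degree 2, so RedStage1 emits the column-normalized
// entries "1\t0\t0.5" and "2\t0\t0.5".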
public int run (final String[] args) throws Exception { if( args.length != 4 ) { System.out.println("args.length = " + args.length); int i; for(i=0; i < args.length; i++) { System.out.println("args[" + i + "] = " + args[i] ); } return printUsage(); } edge_path = new Path(args[0]); output_path = new Path(args[1]); nreducers = Integer.parseInt(args[2]); if( args[3].compareTo("makesym") == 0 ) make_symmetric = 1; else make_symmetric = 0; System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Converting the adjacency matrix to column-normalized format.\n"); JobClient.runJob(configStage1()); System.out.println("\n[PEGASUS] Conversion finished."); System.out.println("[PEGASUS] Column normalized adjacency matrix is saved in the HDFS " + args[1] + "\n"); return 0; } // Configure pass1 protected JobConf configStage1() throws Exception { final JobConf conf = new JobConf(getConf(), PagerankPrep.class); conf.set("make_symmetric", "" + make_symmetric); conf.setJobName("PagerankPrep_Stage1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); FileInputFormat.setInputPaths(conf, edge_path); FileOutputFormat.setOutputPath(conf, output_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } } PEGASUS/src/pegasus/pagerank/PagerankNaive.java0000644000000000000000000004713111443145611020275 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: PageRankNaive.java - PageRank using plain matrix-vector multiplication. Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; class MinMaxInfo { public double min; public double max; }; public class PagerankNaive extends Configured implements Tool { protected static enum PrCounters { CONVERGE_CHECK } protected static double converge_threshold = 0.000001; ////////////////////////////////////////////////////////////////////// // STAGE 1: Generate partial matrix-vector multiplication results. // Perform hash join using Vector.rowid == Matrix.colid. // - Input: edge_file, pagerank vector // - Output: partial matrix-vector multiplication results. 
////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { int make_symmetric = 0; public void configure(JobConf job) { make_symmetric = Integer.parseInt(job.get("make_symmetric")); System.out.println("MapStage1 : make_symmetric = " + make_symmetric); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); if(line.length < 2 ) return; if( line[1].charAt(0) == 'v' ) { // vector : ROWID VALUE('vNNNN') output.collect( new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) ); } else { // In other matrix-vector multiplication, we output (dst, src) here // However, In PageRank, the matrix-vector computation formula is M^T * v. // Therefore, we output (src,dst) here. int src_id = Integer.parseInt(line[0]); int dst_id = Integer.parseInt(line[1]); output.collect( new IntWritable( src_id ), new Text(line[1]) ); if( make_symmetric == 1 ) output.collect( new IntWritable( dst_id ), new Text(line[0]) ); } } } public static class RedStage1 extends MapReduceBase implements Reducer { int number_nodes = 0; double mixing_c = 0; double random_coeff = 0; public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); mixing_c = Double.parseDouble(job.get("mixing_c")); random_coeff = (1-mixing_c) / (double)number_nodes; System.out.println("RedStage1: number_nodes = " + number_nodes + ", mixing_c = " + mixing_c + ", random_coeff = " + random_coeff); } public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int i; double cur_rank = 0; ArrayList dst_nodes_list = new ArrayList(); while (values.hasNext()) { String line_text = values.next().toString(); final String[] line = line_text.split("\t"); if( line.length == 1 ) { if(line_text.charAt(0) == 'v') // vector : VALUE cur_rank = Double.parseDouble(line_text.substring(1)); else { // edge : ROWID dst_nodes_list.add( Integer.parseInt( line[0] ) ); } } } // add random coeff output.collect(key, new Text( "s" + cur_rank )); int outdeg = dst_nodes_list.size(); if( outdeg > 0 ) cur_rank = cur_rank / (double)outdeg; for( i = 0; i < outdeg; i++) { output.collect( new IntWritable( dst_nodes_list.get(i) ), new Text( "v" + cur_rank ) ); } } } //////////////////////////////////////////////////////////////////////////////////////////////// // STAGE 2: merge multiplication results. 
	//  - Input: partial multiplication results
	//  - Output: combined multiplication results
	////////////////////////////////////////////////////////////////////////////////////////////////
	public static class MapStage2 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, Text>
	{
		// Identity mapper
		public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException
		{
			final String[] line = value.toString().split("\t");

			output.collect(new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) );
		}
	}

	public static class RedStage2 extends MapReduceBase implements Reducer<IntWritable, Text, IntWritable, Text>
	{
		int number_nodes = 0;
		double mixing_c = 0;
		double random_coeff = 0;
		double converge_threshold = 0;
		int change_reported = 0;

		public void configure(JobConf job) {
			number_nodes = Integer.parseInt(job.get("number_nodes"));
			mixing_c = Double.parseDouble(job.get("mixing_c"));
			random_coeff = (1-mixing_c) / (double)number_nodes;
			converge_threshold = Double.parseDouble(job.get("converge_threshold"));

			System.out.println("RedStage2: number_nodes = " + number_nodes + ", mixing_c = " + mixing_c + ", random_coeff = " + random_coeff + ", converge_threshold = " + converge_threshold );
		}

		public void reduce (final IntWritable key, final Iterator<Text> values, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException
		{
			int i;
			double next_rank = 0;
			double previous_rank = 0;

			while (values.hasNext()) {
				String cur_value_str = values.next().toString();
				if( cur_value_str.charAt(0) == 's' )
					previous_rank = Double.parseDouble( cur_value_str.substring(1) );
				else
					next_rank += Double.parseDouble( cur_value_str.substring(1) ) ;
			}

			next_rank = next_rank * mixing_c + random_coeff;

			output.collect( key, new Text("v" + next_rank ) );

			if( change_reported == 0 ) {
				double diff = Math.abs(previous_rank - next_rank);

				if( diff > converge_threshold ) {
					reporter.incrCounter(PrCounters.CONVERGE_CHECK, 1);
					change_reported = 1;
				}
			}
		}
	}

	//////////////////////////////////////////////////////////////////////
	// STAGE 3: After finding pagerank, calculate min/max pagerank
	//  - Input: The converged PageRank vector
	//  - Output: (key 0) minimum PageRank, (key 1) maximum PageRank
	//////////////////////////////////////////////////////////////////////
	public static class MapStage3 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, DoubleWritable>
	{
		private final IntWritable from_node_int = new IntWritable();

		public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, DoubleWritable> output, final Reporter reporter) throws IOException
		{
			String line_text = value.toString();
			if (line_text.startsWith("#"))		// ignore comments in edge file
				return;

			final String[] line = line_text.split("\t");
			double pagerank = Double.parseDouble(line[1].substring(1));

			output.collect( new IntWritable(0) , new DoubleWritable( pagerank ) );
			output.collect( new IntWritable(1) , new DoubleWritable( pagerank ) );
		}
	}

	public static class RedStage3 extends MapReduceBase implements Reducer<IntWritable, DoubleWritable, IntWritable, DoubleWritable>
	{
		int number_nodes = 0;
		double mixing_c = 0;
		double random_coeff = 0;

		public void configure(JobConf job) {
			number_nodes = Integer.parseInt(job.get("number_nodes"));
			mixing_c = Double.parseDouble(job.get("mixing_c"));
			random_coeff = (1-mixing_c) / (double)number_nodes;

			System.out.println("RedStage3: number_nodes = " + number_nodes + ", mixing_c = " + mixing_c + ", random_coeff = " + random_coeff );
		}

		public void reduce (final IntWritable key, final Iterator<DoubleWritable> values, final OutputCollector<IntWritable, DoubleWritable> output, final Reporter reporter) throws IOException
		{
			int i;
			double min_value = 1.0;
			double max_value = 0.0;

			int min_or_max = key.get();		// 0 : min, 1 : max

			while (values.hasNext()) {
				double cur_value = values.next().get();

				if( min_or_max == 0 ) {		// find min
					if( cur_value < min_value )
						min_value = cur_value;
				} else {					// find max
					if( cur_value > max_value )
						max_value = cur_value;
				}
			}

			if( min_or_max == 0 )
				output.collect( key, new DoubleWritable(min_value) );
			else
				output.collect( key, new DoubleWritable(max_value) );
		}
	}

	//////////////////////////////////////////////////////////////////////
	// STAGE 4 : Find distribution of pageranks.
	//  - Input: The converged PageRank vector
	//  - Output: The histogram of PageRank vector in 1000 bins between min_PageRank and max_PageRank
	//////////////////////////////////////////////////////////////////////
	public static class MapStage4 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, IntWritable>
	{
		private final IntWritable from_node_int = new IntWritable();
		double min_pr = 0;
		double max_pr = 0;
		double gap_pr = 0;
		int hist_width = 1000;

		public void configure(JobConf job) {
			min_pr = Double.parseDouble(job.get("min_pr"));
			max_pr = Double.parseDouble(job.get("max_pr"));
			gap_pr = max_pr - min_pr;

			System.out.println("MapStage4: min_pr = " + min_pr + ", max_pr = " + max_pr);
		}

		public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, IntWritable> output, final Reporter reporter) throws IOException
		{
			String line_text = value.toString();
			if (line_text.startsWith("#"))		// ignore comments in edge file
				return;

			final String[] line = line_text.split("\t");
			double pagerank = Double.parseDouble(line[1].substring(1));

			int distr_index = (int)(hist_width * (pagerank - min_pr)/gap_pr) + 1;
			if(distr_index == hist_width + 1)
				distr_index = hist_width;

			output.collect( new IntWritable(distr_index) , new IntWritable(1) );
		}
	}

	public static class RedStage4 extends MapReduceBase implements Reducer<IntWritable, IntWritable, IntWritable, IntWritable>
	{
		public void reduce (final IntWritable key, final Iterator<IntWritable> values, final OutputCollector<IntWritable, IntWritable> output, final Reporter reporter) throws IOException
		{
			int sum = 0;

			while (values.hasNext()) {
				int cur_value = values.next().get();
				sum += cur_value;
			}

			output.collect( key, new IntWritable(sum) );
		}
	}

	//////////////////////////////////////////////////////////////////////
	// command line interface
	//////////////////////////////////////////////////////////////////////
	protected Path edge_path = null;
	protected Path vector_path = null;
	protected Path tempmv_path = null;
	protected Path output_path = null;
	protected String local_output_path;
	protected Path minmax_path = new Path("pr_minmax");
	protected Path distr_path = new Path("pr_distr");
	protected int number_nodes = 0;
	protected int niteration = 32;
	protected double mixing_c = 0.85f;
	protected int nreducers = 1;
	protected int make_symmetric = 0;	// convert directed graph to undirected graph

	// Main entry point.
	public static void main (final String[] args) throws Exception
	{
		final int result = ToolRunner.run(new Configuration(), new PagerankNaive(), args);

		System.exit(result);
	}

	// Print the command-line usage text.
	protected static int printUsage ()
	{
		System.out.println("PagerankNaive <edge_path> <tempmv_path> <output_path> <# of nodes> <# of tasks> <niteration> <makesym or nosym> <new or contNN>");

		ToolRunner.printGenericCommandUsage(System.out);

		return -1;
	}
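// ---------------------------------------------------------------------
// [Illustrative sketch, not part of the original source.] MapStage4 above
// buckets each PageRank score into one of hist_width (=1000) bins spanning
// [min_pr, max_pr]. The helper below reproduces that binning rule,
// including the clamp that folds the exact maximum into the last bin; it
// assumes max_pr > min_pr, as guaranteed by Stage 3.
public final class HistogramBinSketch
{
	public static int binOf(double pr, double minPr, double maxPr, int histWidth)
	{
		double gap = maxPr - minPr;
		int idx = (int) (histWidth * (pr - minPr) / gap) + 1;	// 1-based bin index
		if (idx == histWidth + 1)
			idx = histWidth;	// pr == maxPr falls into the last bin
		return idx;
	}

	public static void main(String[] args)
	{
		// e.g. min=0.0001, max=0.01: the maximum score lands in bin 1000.
		System.out.println(binOf(0.01, 0.0001, 0.01, 1000));
	}
}
// ---------------------------------------------------------------------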
	// submit the map/reduce job.
	public int run (final String[] args) throws Exception
	{
		if( args.length != 8 ) {
			return printUsage();
		}
		int i;

		edge_path = new Path(args[0]);
		vector_path = new Path("pr_vector");
		tempmv_path = new Path(args[1]);
		output_path = new Path(args[2]);
		number_nodes = Integer.parseInt(args[3]);
		nreducers = Integer.parseInt(args[4]);
		niteration = Integer.parseInt(args[5]);

		if( args[6].compareTo("makesym") == 0 )
			make_symmetric = 1;
		else
			make_symmetric = 0;

		int cur_iteration = 1;
		if( args[7].startsWith("cont") )
			cur_iteration = Integer.parseInt(args[7].substring(4));

		local_output_path = args[2] + "_temp";

		converge_threshold = ((double)1.0/(double) number_nodes)/10;

		System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
		System.out.println("[PEGASUS] Computing PageRank. Max iteration = " + niteration + ", threshold = " + converge_threshold + ", cur_iteration=" + cur_iteration + "\n");

		if( cur_iteration == 1 )
			gen_initial_vector(number_nodes, vector_path);

		final FileSystem fs = FileSystem.get(getConf());

		// Run pagerank until it converges.
		for (i = cur_iteration; i <= niteration; i++) {
			JobClient.runJob(configStage1());
			RunningJob job = JobClient.runJob(configStage2());

			// The counter is newly created per every iteration.
			Counters c = job.getCounters();
			long changed = c.getCounter(PrCounters.CONVERGE_CHECK);
			System.out.println("Iteration = " + i + ", changed reducer = " + changed);

			if( changed == 0 ) {
				System.out.println("PageRank vector converged. Now preparing to finish...");
				fs.delete(vector_path);
				fs.delete(tempmv_path);
				fs.rename(output_path, vector_path);
				break;
			}

			// rotate directory
			fs.delete(vector_path);
			fs.delete(tempmv_path);
			fs.rename(output_path, vector_path);
		}

		if( i > niteration ) {
			System.out.println("Reached the max iteration. Now preparing to finish...");
		}

		// find min/max of pageranks
		System.out.println("Finding minimum and maximum pageranks...");
		JobClient.runJob(configStage3());

		FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path));
		String new_path = local_output_path + "/" ;
		fs.copyToLocalFile(minmax_path, new Path(new_path) ) ;

		MinMaxInfo mmi = readMinMax( new_path );
		System.out.println("min = " + mmi.min + ", max = " + mmi.max );

		// find distribution of pageranks
		JobClient.runJob(configStage4(mmi.min, mmi.max));

		System.out.println("\n[PEGASUS] PageRank computed.");
		System.out.println("[PEGASUS] The final PageRanks are in the HDFS pr_vector.");
		System.out.println("[PEGASUS] The minimum and maximum PageRanks are in the HDFS pr_minmax.");
		System.out.println("[PEGASUS] The histogram of PageRanks in 1000 bins between min_PageRank and max_PageRank are in the HDFS pr_distr.\n");

		return 0;
	}

	// generate initial pagerank vector
	public void gen_initial_vector(int number_nodes, Path vector_path) throws IOException
	{
		int i, j = 0;
		int milestone = number_nodes/10;
		String file_name = "pagerank_init_vector.temp";
		FileWriter file = new FileWriter(file_name);
		BufferedWriter out = new BufferedWriter (file);

		System.out.print("Creating initial pagerank vectors...");
		double initial_rank = 1.0 / (double)number_nodes;

		for(i=0; i < number_nodes; i++)
		{
			out.write(i + "\tv" + initial_rank +"\n");
			if(++j > milestone) {
				System.out.print(".");
				j = 0;
			}
		}
		out.close();
		System.out.println("");

		// copy it to vector_path, and delete the temporary local file.
final FileSystem fs = FileSystem.get(getConf()); fs.copyFromLocalFile( true, new Path("./" + file_name), new Path (vector_path.toString()+ "/" + file_name) ); } // read neighborhood number after each iteration. public static MinMaxInfo readMinMax(String new_path) throws Exception { MinMaxInfo info = new MinMaxInfo(); String output_path = new_path + "/part-00000"; String file_line = ""; try { BufferedReader in = new BufferedReader( new InputStreamReader(new FileInputStream( output_path ), "UTF8")); // Read first line file_line = in.readLine(); // Read through file one line at time. Print line # and line while (file_line != null){ final String[] line = file_line.split("\t"); if(line[0].startsWith("0")) info.min = Double.parseDouble( line[1] ); else info.max = Double.parseDouble( line[1] ); file_line = in.readLine(); } in.close(); } catch (IOException e) { e.printStackTrace(); } return info;//result; } // Configure pass1 protected JobConf configStage1 () throws Exception { final JobConf conf = new JobConf(getConf(), PagerankNaive.class); conf.set("number_nodes", "" + number_nodes); conf.set("mixing_c", "" + mixing_c); conf.set("make_symmetric", "" + make_symmetric); conf.setJobName("Pagerank_Stage1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); FileInputFormat.setInputPaths(conf, edge_path, vector_path); FileOutputFormat.setOutputPath(conf, tempmv_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure pass2 protected JobConf configStage2 () throws Exception { final JobConf conf = new JobConf(getConf(), PagerankNaive.class); conf.set("number_nodes", "" + number_nodes); conf.set("mixing_c", "" + mixing_c); conf.set("converge_threshold", "" + converge_threshold); conf.setJobName("Pagerank_Stage2"); conf.setMapperClass(MapStage2.class); conf.setReducerClass(RedStage2.class); FileInputFormat.setInputPaths(conf, tempmv_path); FileOutputFormat.setOutputPath(conf, output_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure pass3 protected JobConf configStage3 () throws Exception { final JobConf conf = new JobConf(getConf(), PagerankNaive.class); conf.set("number_nodes", "" + number_nodes); conf.set("mixing_c", "" + mixing_c); conf.set("converge_threshold", "" + converge_threshold); conf.setJobName("Pagerank_Stage3"); conf.setMapperClass(MapStage3.class); conf.setReducerClass(RedStage3.class); conf.setCombinerClass(RedStage3.class); FileInputFormat.setInputPaths(conf, vector_path); FileOutputFormat.setOutputPath(conf, minmax_path); conf.setNumReduceTasks( 1 ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(DoubleWritable.class); return conf; } // Configure pass4 protected JobConf configStage4 (double min_pr, double max_pr) throws Exception { final JobConf conf = new JobConf(getConf(), PagerankNaive.class); conf.set("min_pr", "" + min_pr); conf.set("max_pr", "" + max_pr); conf.setJobName("Pagerank_Stage4"); conf.setMapperClass(MapStage4.class); conf.setReducerClass(RedStage4.class); conf.setCombinerClass(RedStage4.class); FileInputFormat.setInputPaths(conf, vector_path); FileOutputFormat.setOutputPath(conf, distr_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(IntWritable.class); return conf; } } PEGASUS/src/pegasus/pagerank/PagerankBlock.java0000644000000000000000000004355511443145611020273 
0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: PageRankBlock.java - PageRank using block matrix-vector multiplication. Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class PagerankBlock extends Configured implements Tool { protected static enum PrCounters { CONVERGE_CHECK } protected static double converge_threshold = 0.000001; ////////////////////////////////////////////////////////////////////// // STAGE 1: Generate partial matrix-vector multiplication results. // Perform hash join using Vector.rowid == Matrix.colid. // - Input: edge_file, pagerank vector // - Output: partial matrix-vector multiplication results. ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); if( line.length < 2 ) return; if( line.length == 2 ) { // vector. 
output.collect( new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) ); } else { // edge output.collect( new IntWritable(Integer.parseInt(line[1])), new Text(line[0] + "\t" + line[2]) ); } } } public static class RedStage1 extends MapReduceBase implements Reducer { protected int block_width; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); System.out.println("RedStage1: block_width=" + block_width); } public void reduce (final IntWritable key, final Iterator values, OutputCollector output, final Reporter reporter) throws IOException { int i; float vector_val = 0; ArrayList to_nodes_list = new ArrayList(); ArrayList to_val_list = new ArrayList(); ArrayList> vectorArr = null; // save vector ArrayList>> blockArr = new ArrayList>>(); // save blocks ArrayList blockRowArr = new ArrayList(); // save block rows(integer) while (values.hasNext()) { // vector: key=BLOCKID, value= (IN-BLOCK-INDEX VALUE)s // matrix: key=BLOCK-COL BLOCK-ROW, value=(IN-BLOCK-COL IN-BLOCK-ROW VALUE)s String line_text = values.next().toString(); final String[] line = line_text.split("\t"); if( line.length == 1 ) { // vector : VALUE vectorArr = GIMV.parseVectorVal(line_text.substring(1), Double.class); } else { // edge : ROWID VALUE blockArr.add( GIMV.parseBlockVal(line[1], Double.class) ); int block_row = Integer.parseInt(line[0]); blockRowArr.add( block_row ); } } int blockCount = blockArr.size(); if( vectorArr == null || blockCount == 0 ) // missing vector or block. return; // output 'self' block to check convergence Text self_output = GIMV.formatVectorElemOutput("s", vectorArr); output.collect(key, self_output ); // For every matrix block, join it with vector and output partial results Iterator>> blockArrIter = blockArr.iterator(); Iterator blockRowIter = blockRowArr.iterator(); while( blockArrIter.hasNext() ){ ArrayList> cur_block = blockArrIter.next(); int cur_block_row = blockRowIter.next(); // multiply cur_block and vectorArr. ArrayList> cur_mult_result = GIMV.multBlockVector( cur_block, vectorArr, block_width); String cur_block_output = "o"; if( cur_mult_result != null && cur_mult_result.size() > 0 ) { Iterator> cur_mult_result_iter = cur_mult_result.iterator(); while( cur_mult_result_iter.hasNext() ) { VectorElem elem = cur_mult_result_iter.next(); if( cur_block_output != "o" ) cur_block_output += " "; cur_block_output += ("" + elem.row + " " + elem.val); } // output the partial result of multiplication. 
output.collect(new IntWritable(cur_block_row), new Text(cur_block_output)); } } } } ////////////////////////////////////////////////////////////////////// // PASS 2: merge partial multiplication results ////////////////////////////////////////////////////////////////////// public static class MapStage2 extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); IntWritable node_key = new IntWritable(Integer.parseInt(line[0])); output.collect(node_key, new Text(line[1]) ); } } public static class RedStage2 extends MapReduceBase implements Reducer { protected int block_width; double mixing_c = 0; double random_coeff = 0; double converge_threshold = 0; int number_nodes = 1; public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); block_width = Integer.parseInt(job.get("block_width")); mixing_c = Double.parseDouble(job.get("mixing_c")); random_coeff = (1-mixing_c) / (double)number_nodes; converge_threshold = Double.parseDouble(job.get("converge_threshold")); System.out.println("RedStage2 : block_width=" + block_width + ", converge_threshold="+converge_threshold); } public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { ArrayList> self_vector = null; int i; double [] out_vals = new double[block_width]; for(i=0; i < block_width; i++) out_vals[i] = 0; while (values.hasNext()) { String cur_str = values.next().toString(); if( cur_str.charAt(0) == 's' ) { self_vector = GIMV.parseVectorVal(cur_str.substring(1), Double.class); continue; } ArrayList> cur_vector = GIMV.parseVectorVal(cur_str.substring(1), Double.class); Iterator> vector_iter = cur_vector.iterator(); while( vector_iter.hasNext() ) { VectorElem v_elem = vector_iter.next(); out_vals[ v_elem.row ] += v_elem.val; } } // output updated PageRank String out_str = "v"; for(i = 0; i < block_width; i++) { if( out_str.length() >1 ) out_str += " "; out_vals[i] = out_vals[i] * mixing_c + random_coeff; out_str += ("" + i + " " + out_vals[i]) ; } output.collect( key, new Text(out_str) ); // compare the previous and the current PageRank Iterator> sv_iter = self_vector.iterator(); while( sv_iter.hasNext() ) { VectorElem cur_ve = sv_iter.next(); double diff = Math.abs(cur_ve.val - out_vals[cur_ve.row]); if( diff > converge_threshold ) { reporter.incrCounter(PrCounters.CONVERGE_CHECK, 1); break; } } } } ////////////////////////////////////////////////////////////////////// // PASS 2.5: unfold the converged block PageRank results to plain format. // This is a map-only stage. 
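// ---------------------------------------------------------------------
// [Illustrative sketch, not part of the original source.] Before the
// unfold pass below, recall how RedStage1 further above forms the partial
// results that RedStage2 just merged: GIMV.multBlockVector joins one
// block_width x block_width matrix block with the matching vector
// segment. A minimal stand-in, with the block given in hypothetical
// coordinate arrays (row[i], col[i], val[i]) and vec holding the segment:
public final class BlockMultiplySketch
{
	public static double[] multBlockVector(int[] row, int[] col, double[] val,
	                                       double[] vec, int blockWidth)
	{
		double[] out = new double[blockWidth];
		for (int i = 0; i < val.length; i++)
			out[row[i]] += val[i] * vec[col[i]];	// accumulate partial products
		return out;
	}
}
// ---------------------------------------------------------------------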
// - Input: the converged block PageRank vector // - Output: (node_id, "v"PageRank_of_the_node) ////////////////////////////////////////////////////////////////////// public static class MapStage25 extends MapReduceBase implements Mapper { int block_width; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); System.out.println("MapStage25: block_width = " + block_width); } // input sample : //0 v0 0.11537637712698735 1 0.11537637712698735 public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); final String[] tokens = line[1].substring(1).split(" "); int i; int block_id = Integer.parseInt(line[0] ); for(i = 0; i < tokens.length; i+=2) { int elem_row = Integer.parseInt(tokens[i]); double pagerank = Double.parseDouble(tokens[i+1]); output.collect( new IntWritable(block_width * block_id + elem_row), new Text("v" + pagerank) ); } } } ////////////////////////////////////////////////////////////////////// // STAGE 3: After finding pagerank, calculate min/max pagerank // - Input: The converged PageRank vector // - Output: (key 0) minimum PageRank, (key 1) maximum PageRank ////////////////////////////////////////////////////////////////////// // We reuse MapStage3 and RedStage3 of PageRankNaive.java ////////////////////////////////////////////////////////////////////// // STAGE 4 : Find distribution of pageranks. // - Input: The converged PageRank vector // - Output: The histogram of PageRank vector in 1000 bins between min_PageRank and max_PageRank ////////////////////////////////////////////////////////////////////// // We reuse MapStage4 and RedStage4 of PageRankNaive.java ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path edge_path = null; protected Path vector_path = null; protected Path tempmv_path = null; protected Path output_path = null; protected Path vector_unfold_path = new Path("pr_vector"); protected Path minmax_path = new Path("pr_minmax"); protected Path distr_path = new Path("pr_distr"); protected String local_output_path; protected int number_nodes = 0; protected int niteration = 32; protected double mixing_c = 0.85f; protected int nreducers = 1; protected int make_symmetric = 0; // convert directed graph to undirected graph protected int block_width = 64; FileSystem fs ; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new PagerankBlock(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("PagerankBlock <# of nodes> <# of reducers> "); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. 
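// ---------------------------------------------------------------------
// [Illustrative note, not part of the original source.] MapStage25 above
// recovers a global node id from (block_id, in-block row) as
//     node_id = block_width * block_id + elem_row
// and the inverse mapping, used when a plain vector is block-encoded, is
//     block_id = node_id / block_width;  elem_row = node_id % block_width;
// For example, with block_width = 64, node 130 sits at row 2 of block 2.
public final class BlockIndexSketch
{
	public static int blockId(int nodeId, int blockWidth)  { return nodeId / blockWidth; }
	public static int elemRow(int nodeId, int blockWidth)  { return nodeId % blockWidth; }
	public static int nodeId(int blockId, int elemRow, int blockWidth)
	{
		return blockWidth * blockId + elemRow;	// e.g. 64*2 + 2 = 130
	}
}
// ---------------------------------------------------------------------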
public int run (final String[] args) throws Exception { if( args.length != 8 ) { return printUsage(); } int i; edge_path = new Path(args[0]); vector_path = new Path(args[1]); tempmv_path = new Path(args[2]); output_path = new Path(args[3]); number_nodes = Integer.parseInt(args[4]); nreducers = Integer.parseInt(args[5]); niteration = Integer.parseInt(args[6]); block_width = Integer.parseInt(args[7]); local_output_path = args[2] + "_temp"; converge_threshold = ((double)1.0/(double) number_nodes)/50; System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing PageRank using block method. Max iteration = " +niteration + ", threshold = " + converge_threshold + "\n"); fs = FileSystem.get(getConf()); // Iteratively calculate neighborhood function. for (i = 0; i < niteration; i++) { JobClient.runJob(configStage1()); RunningJob job = JobClient.runJob(configStage2()); Counters c = job.getCounters(); long changed = c.getCounter(PrCounters.CONVERGE_CHECK); System.out.println("Iteration = " + i + ", changed reducer = " + changed); if( changed == 0 ) { System.out.println("PageRank vector converged. Now preparing to finish..."); fs.delete(vector_path); fs.delete(tempmv_path); fs.rename(output_path, vector_path); break; } // rotate directory fs.delete(vector_path); fs.delete(tempmv_path); fs.rename(output_path, vector_path); } if( i == niteration ) { System.out.println("Reached the max iteration. Now preparing to finish..."); } // unfold the block PageRank to plain format System.out.println("Unfolding the block PageRank to plain format..."); JobClient.runJob(configStage25()); // find min/max of pageranks System.out.println("Finding minimum and maximum pageranks..."); JobClient.runJob(configStage3()); FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path)); String new_path = local_output_path + "/" ; fs.copyToLocalFile(minmax_path, new Path(new_path) ) ; MinMaxInfo mmi = PagerankNaive.readMinMax( new_path ); System.out.println("min = " + mmi.min + ", max = " + mmi.max ); // find distribution of pageranks JobClient.runJob(configStage4(mmi.min, mmi.max)); System.out.println("\n[PEGASUS] PageRank computed."); System.out.println("[PEGASUS] The final PageRanks are in the HDFS pr_vector."); System.out.println("[PEGASUS] The minium and maximum PageRanks are in the HDFS pr_minmax."); System.out.println("[PEGASUS] The histogram of PageRanks in 1000 bins between min_PageRank and max_PageRank are in the HDFS pr_distr.\n"); return 0; } // Configure pass1 protected JobConf configStage1 () throws Exception { final JobConf conf = new JobConf(getConf(), PagerankBlock.class); conf.set("number_nodes", "" + number_nodes); conf.set("mixing_c", "" + mixing_c); conf.set("make_symmetric", "" + make_symmetric); conf.set("block_width", "" + block_width); conf.setJobName("Pagerank_Stage1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); fs.delete(tempmv_path, true); FileInputFormat.setInputPaths(conf, edge_path, vector_path); FileOutputFormat.setOutputPath(conf, tempmv_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure pass2 protected JobConf configStage2 () throws Exception { final JobConf conf = new JobConf(getConf(), PagerankBlock.class); conf.set("number_nodes", "" + number_nodes); conf.set("mixing_c", "" + mixing_c); conf.set("converge_threshold", "" + converge_threshold); conf.set("block_width", "" + 
block_width); conf.setJobName("Pagerank_Stage2"); conf.setMapperClass(MapStage2.class); conf.setReducerClass(RedStage2.class); fs.delete(output_path, true); FileInputFormat.setInputPaths(conf, tempmv_path); FileOutputFormat.setOutputPath(conf, output_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure Stage25 protected JobConf configStage25() throws Exception { final JobConf conf = new JobConf(getConf(), ConCmptBlock.class); conf.set("block_width", "" + block_width); conf.setJobName("Pagerank_Stage25"); conf.setMapperClass(MapStage25.class); fs.delete(vector_unfold_path, true); FileInputFormat.setInputPaths(conf, vector_path); FileOutputFormat.setOutputPath(conf, vector_unfold_path); conf.setNumReduceTasks( 0 ); //This is essential for map-only tasks. conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure pass3 protected JobConf configStage3 () throws Exception { final JobConf conf = new JobConf(getConf(), PagerankNaive.class); conf.set("number_nodes", "" + number_nodes); conf.set("mixing_c", "" + mixing_c); conf.set("converge_threshold", "" + converge_threshold); conf.setJobName("Pagerank_Stage3"); conf.setMapperClass(PagerankNaive.MapStage3.class); conf.setReducerClass(PagerankNaive.RedStage3.class); conf.setCombinerClass(PagerankNaive.RedStage3.class); fs.delete(minmax_path, true); FileInputFormat.setInputPaths(conf, vector_unfold_path); FileOutputFormat.setOutputPath(conf, minmax_path); conf.setNumReduceTasks( 1 ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(DoubleWritable.class); return conf; } // Configure pass4 protected JobConf configStage4 (double min_pr, double max_pr) throws Exception { final JobConf conf = new JobConf(getConf(), PagerankNaive.class); conf.set("min_pr", "" + min_pr); conf.set("max_pr", "" + max_pr); conf.setJobName("Pagerank_Stage4"); conf.setMapperClass(PagerankNaive.MapStage4.class); conf.setReducerClass(PagerankNaive.RedStage4.class); conf.setCombinerClass(PagerankNaive.RedStage4.class); fs.delete(distr_path, true); FileInputFormat.setInputPaths(conf, vector_unfold_path); FileOutputFormat.setOutputPath(conf, distr_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(IntWritable.class); return conf; } } PEGASUS/src/pegasus/rwr/0000755000000000000000000000000011443145611013733 5ustar rootrootPEGASUS/src/pegasus/rwr/RWRNaive.java0000644000000000000000000005227411443145611016245 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: RWRNaive.java - RWR using plain matrix-vector multiplication. 
Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class RWRNaive extends Configured implements Tool { protected static double converge_threshold = 0.05; ////////////////////////////////////////////////////////////////////// // STAGE 1: Generate partial matrix-vector multiplication results. // Perform hash join using Vector.rowid == Matrix.colid. // - Input: edge_file, rwr vector // - Output: partial matrix-vector multiplication results. ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { int make_symmetric = 0; public void configure(JobConf job) { make_symmetric = Integer.parseInt(job.get("make_symmetric")); System.out.println("MapStage1 : make_symmetric = " + make_symmetric); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); if(line.length < 2 ) return; if( line[1].charAt(0) == 'v' ) { // vector : ROWID VALUE('vNNNN') output.collect( new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) ); } else { // In other matrix-vector multiplication, we output (dst, src) here // However, In RWR, the matrix-vector computation formula is M^T * v. // Therefore, we output (src,dst) here. int src_id = Integer.parseInt(line[0]); int dst_id = Integer.parseInt(line[1]); output.collect( new IntWritable( src_id ), new Text(line[1]) ); if( make_symmetric == 1 ) output.collect( new IntWritable( dst_id ), new Text(line[0]) ); } } } public static class RedStage1 extends MapReduceBase implements Reducer { int number_nodes = 0; double mixing_c = 0; double random_coeff = 0; public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); mixing_c = Double.parseDouble(job.get("mixing_c")); random_coeff = (1-mixing_c) / (double)number_nodes; System.out.println("RedStage1: number_nodes = " + number_nodes + ", mixing_c = " + mixing_c + ", random_coeff = " + random_coeff); } public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int i; double cur_rank = 0; ArrayList dst_nodes_list = new ArrayList(); while (values.hasNext()) { String line_text = values.next().toString(); final String[] line = line_text.split("\t"); if( line.length == 1 ) { if(line_text.charAt(0) == 'v') // vector : VALUE cur_rank = Double.parseDouble(line_text.substring(1)); else { // edge : ROWID dst_nodes_list.add( Integer.parseInt( line[0] ) ); } } } // add random coeff output.collect(key, new Text( "s" + cur_rank )); int outdeg = dst_nodes_list.size(); if( outdeg > 0 ) cur_rank = cur_rank / (double)outdeg; for( i = 0; i < outdeg; i++) { output.collect( new IntWritable( dst_nodes_list.get(i) ), new Text( "v" + cur_rank ) ); } } } //////////////////////////////////////////////////////////////////////////////////////////////// // STAGE 2: merge multiplication results. 
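// ---------------------------------------------------------------------
// [Illustrative sketch, not part of the original source.] RWR iterates
//     v <- c * W^T * v + q
// where q is the restart distribution concentrated on the query node(s)
// and pre-scaled by (1 - c). Unlike PageRank, the jump mass is not spread
// uniformly, so RedStage2 below applies only the factor c; the q term is
// added afterwards by a separate Saxpy job. A single-machine analogue:
public final class RwrStepSketch
{
	public static double[] step(int[][] adj, double[] v, double[] q, double c)
	{
		int n = v.length;
		double[] next = new double[n];
		for (int u = 0; u < n; u++) {			// W^T * v
			int outdeg = adj[u].length;
			if (outdeg == 0) continue;
			double share = v[u] / outdeg;
			for (int w : adj[u]) next[w] += share;
		}
		for (int w = 0; w < n; w++)
			next[w] = c * next[w] + q[w];		// scale, then add restart mass
		return next;
	}
}
// ---------------------------------------------------------------------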
	//  - Input: partial multiplication results
	//  - Output: combined multiplication results
	////////////////////////////////////////////////////////////////////////////////////////////////
	public static class MapStage2 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, Text>
	{
		// Identity mapper
		public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException
		{
			final String[] line = value.toString().split("\t");

			output.collect(new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) );
		}
	}

	public static class RedStage2 extends MapReduceBase implements Reducer<IntWritable, Text, IntWritable, Text>
	{
		int number_nodes = 0;
		double mixing_c = 0;

		public void configure(JobConf job) {
			number_nodes = Integer.parseInt(job.get("number_nodes"));
			mixing_c = Double.parseDouble(job.get("mixing_c"));

			System.out.println("RedStage2: number_nodes = " + number_nodes + ", mixing_c = " + mixing_c);
		}

		public void reduce (final IntWritable key, final Iterator<Text> values, final OutputCollector<IntWritable, Text> output, final Reporter reporter) throws IOException
		{
			int i;
			double next_rank = 0;
			double previous_rank = 0;

			while (values.hasNext()) {
				String cur_value_str = values.next().toString();
				if( cur_value_str.charAt(0) == 's' )
					previous_rank = Double.parseDouble( cur_value_str.substring(1) );
				else
					next_rank += Double.parseDouble( cur_value_str.substring(1) ) ;
			}

			next_rank = next_rank * mixing_c;

			output.collect( key, new Text("v" + next_rank ) );
		}
	}

	//////////////////////////////////////////////////////////////////////
	// STAGE 3: After finding rwr, calculate min/max rwr
	//  - Input: The converged RWR vector
	//  - Output: (key 0) minimum RWR, (key 1) maximum RWR
	//////////////////////////////////////////////////////////////////////
	public static class MapStage3 extends MapReduceBase implements Mapper<LongWritable, Text, IntWritable, DoubleWritable>
	{
		private final IntWritable from_node_int = new IntWritable();

		public void map (final LongWritable key, final Text value, final OutputCollector<IntWritable, DoubleWritable> output, final Reporter reporter) throws IOException
		{
			String line_text = value.toString();
			if (line_text.startsWith("#"))		// ignore comments in edge file
				return;

			final String[] line = line_text.split("\t");
			double rwr = Double.parseDouble(line[1].substring(1));

			output.collect( new IntWritable(0) , new DoubleWritable( rwr ) );
			output.collect( new IntWritable(1) , new DoubleWritable( rwr ) );
		}
	}

	public static class RedStage3 extends MapReduceBase implements Reducer<IntWritable, DoubleWritable, IntWritable, DoubleWritable>
	{
		int number_nodes = 0;
		double mixing_c = 0;
		double random_coeff = 0;

		public void configure(JobConf job) {
			number_nodes = Integer.parseInt(job.get("number_nodes"));
			mixing_c = Double.parseDouble(job.get("mixing_c"));
			random_coeff = (1-mixing_c) / (double)number_nodes;

			System.out.println("RedStage3: number_nodes = " + number_nodes + ", mixing_c = " + mixing_c + ", random_coeff = " + random_coeff );
		}

		public void reduce (final IntWritable key, final Iterator<DoubleWritable> values, final OutputCollector<IntWritable, DoubleWritable> output, final Reporter reporter) throws IOException
		{
			int i;
			double min_value = 1.0;
			double max_value = 0.0;

			int min_or_max = key.get();		// 0 : min, 1 : max

			while (values.hasNext()) {
				double cur_value = values.next().get();

				if( min_or_max == 0 ) {		// find min
					if( cur_value < min_value )
						min_value = cur_value;
				} else {					// find max
					if( cur_value > max_value )
						max_value = cur_value;
				}
			}

			if( min_or_max == 0 )
				output.collect( key, new DoubleWritable(min_value) );
			else
				output.collect( key, new DoubleWritable(max_value) );
		}
	}

	//////////////////////////////////////////////////////////////////////
	// STAGE 4 : Find distribution of rwrs.
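// ---------------------------------------------------------------------
// [Editorial note, not part of the original source.] Stage 3 above can
// register RedStage3 as both combiner and reducer (see configStage3)
// because min and max are associative and commutative, so per-map partial
// results combine safely:
//     min(min(a, b), c) == min(a, min(b, c)),  e.g.
//     Math.min(Math.min(0.3, 0.1), 0.2) == Math.min(0.3, Math.min(0.1, 0.2))
// A non-associative aggregate (e.g. a plain average) could not reuse its
// reducer as a combiner this way.
// ---------------------------------------------------------------------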
// - Input: The converged RWR vector // - Output: The histogram of RWR vector in 1000 bins between min_RWR and max_RWR ////////////////////////////////////////////////////////////////////// public static class MapStage4 extends MapReduceBase implements Mapper { private final IntWritable from_node_int = new IntWritable(); double min_rwr = 0; double max_rwr = 0; double gap_rwr = 0; int hist_width = 1000; public void configure(JobConf job) { min_rwr = Double.parseDouble(job.get("min_rwr")); max_rwr = Double.parseDouble(job.get("max_rwr")); gap_rwr = max_rwr - min_rwr; System.out.println("MapStage4: min_rwr = " + min_rwr + ", max_rwr = " + max_rwr); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); double rwr = Double.parseDouble(line[1].substring(1)); int distr_index = (int)(hist_width * (rwr - min_rwr)/gap_rwr) + 1; if(distr_index == hist_width + 1) distr_index = hist_width; output.collect( new IntWritable(distr_index) , new IntWritable(1) ); } } public static class RedStage4 extends MapReduceBase implements Reducer { public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int sum = 0; while (values.hasNext()) { int cur_value = values.next().get(); sum += cur_value; } output.collect( key, new IntWritable(sum) ); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path edge_path = null; protected Path vector_path = null; protected Path new_vector_path = null; protected Path tempmv_path = null; protected Path mv_output_path = null; protected Path query_raw_path = null; protected Path query_path = new Path("rwr_query_norm"); protected Path diff_path = new Path("rwr_vector_difference"); protected String local_output_path; protected Path minmax_path = new Path("rwr_minmax"); protected Path distr_path = new Path("rwr_distr"); protected long number_nodes = 0; protected int niteration = 32; protected double mixing_c = 0.85f; protected int nreducers = 1; protected int make_symmetric = 0; // convert directed graph to undirected graph FileSystem fs; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new RWRNaive(), args); System.exit(result); } // Print the command-line usage text. 
protected static int printUsage () { System.out.println("RWRNaive <# of nodes> <# of reducers> "); ToolRunner.printGenericCommandUsage(System.out); return -1; } // y = y + ax public static Path Saxpy(Configuration conf, int nreducer, Path py, Path px, Path out_path, double a) throws Exception{ //System.out.println("Running Saxpy: py=" + py.getName() + ", px=" + px.getName() + ", a=" +a); String [] args = new String[4]; args[0] = new String("" + nreducer); args[1] = new String(py.getName()); args[2] = new String(px.getName() ); args[3] = new String("" + a); int saxpy_result = ToolRunner.run(conf, new Saxpy(), args); //Path ret_path = null; FileSystem fs = FileSystem.get(conf); fs.delete(out_path, true); if( saxpy_result == 1 ) fs.rename(new Path("saxpy_output1"), out_path ); else fs.rename(new Path("saxpy_output"), out_path ); return out_path; } // y = y + ax public static Path SaxpyTextoutput(Configuration conf, int nreducer, Path py, Path px, Path out_path, double a) throws Exception{ //System.out.println("Running Saxpy: py=" + py.getName() + ", px=" + px.getName() + ", a=" +a); String [] args = new String[4]; args[0] = new String("" + nreducer); args[1] = new String(py.getName()); args[2] = new String(px.getName() ); args[3] = new String("" + a); int saxpy_result = ToolRunner.run(conf, new SaxpyTextoutput(), args); //Path ret_path = null; FileSystem fs = FileSystem.get(conf); fs.delete(out_path, true); if( saxpy_result == 1 ) fs.rename(new Path("saxpy_output1"), out_path ); else fs.rename(new Path("saxpy_output"), out_path ); return out_path; } // submit the map/reduce job. public int run (final String[] args) throws Exception { if( args.length != 8 ) { return printUsage(); } int i; edge_path = new Path(args[0]); vector_path = new Path("rwr_vector"); tempmv_path = new Path("rwr_tempmv"); mv_output_path = new Path("rwr_mv_output"); new_vector_path = new Path("rwr_vector_new"); query_raw_path = new Path(args[1]); number_nodes = Long.parseLong(args[2]); nreducers = Integer.parseInt(args[3]); niteration = Integer.parseInt(args[4]); if( args[5].compareTo("makesym") == 0 ) make_symmetric = 1; else make_symmetric = 0; int cur_iteration = 1; if( args[6].startsWith("cont") ) cur_iteration = Integer.parseInt(args[6].substring(4)); mixing_c = Double.parseDouble(args[7]); local_output_path = "rwr_output_temp"; System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing RWR. Max iteration = " +niteration + ", threshold = " + converge_threshold + ", cur_iteration=" + cur_iteration + ", |V|=" + number_nodes + "\n"); fs = FileSystem.get(getConf()); if( cur_iteration == 1 ) gen_initial_vector(number_nodes, vector_path); // normalize query String []new_args = new String[4]; new_args[0] = args[1]; new_args[1] = "rwr_query_norm"; new_args[2] = "" + nreducers; new_args[3] = "" + (1.0 - mixing_c); ToolRunner.run(getConf(), new NormalizeVector(), new_args); // Iterate until converges. 
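// ---------------------------------------------------------------------
// [Illustrative sketch, not part of the original source.] The iteration
// loop below composes the RWR update from the vector primitives above:
//     v2   = (c * W^T * v) + q           -- SaxpyTextoutput with a = 1.0
//     diff = v2 + (-1.0) * v             -- Saxpy with a = -1.0
//     stop when ||diff||_1 < threshold   -- L1norm job
// An in-memory analogue of saxpy (y <- y + a*x) and the L1 test:
public final class SaxpySketch
{
	public static void saxpy(double[] y, double[] x, double a)
	{
		for (int i = 0; i < y.length; i++)
			y[i] += a * x[i];
	}

	public static double l1norm(double[] x)
	{
		double s = 0;
		for (double e : x) s += Math.abs(e);
		return s;
	}
}
// ---------------------------------------------------------------------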
for (i = cur_iteration; i <= niteration; i++) { System.out.println("\n\nITERATION " + (i)); // v1 <- c*W*v JobClient.runJob(configStage1()); RunningJob job = JobClient.runJob(configStage2()); // v2 <- v1 + q SaxpyTextoutput( getConf(), nreducers, mv_output_path, query_path, new_vector_path, 1.0); // diff = || v2 - vector || Saxpy( getConf(), nreducers, new_vector_path, vector_path, diff_path, -1.0); // compute l1 norm new_args = new String[1]; new_args[0] = diff_path.getName(); ToolRunner.run(getConf(), new L1norm(), new_args); double difference = PegasusUtils.read_l1norm_result(getConf()); FileSystem lfs = FileSystem.getLocal(getConf()); lfs.delete(new Path("l1norm"), true); System.out.println("difference = " + difference ); if( difference < converge_threshold ) { System.out.println("RWR vector converged. Now preparing to finish..."); fs.delete(vector_path); fs.delete(tempmv_path); fs.rename(new_vector_path, vector_path); break; } // rotate directory fs.delete(vector_path); fs.delete(tempmv_path); fs.rename(new_vector_path, vector_path); } if( i == niteration ) { System.out.println("Reached the max iteration. Now preparing to finish..."); } // find min/max of rwrs System.out.println("Finding minimum and maximum rwrs..."); JobClient.runJob(configStage3()); FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path)); String new_path = local_output_path + "/" ; fs.copyToLocalFile(minmax_path, new Path(new_path) ) ; MinMaxInfo mmi = readMinMax( new_path ); System.out.println("min = " + mmi.min + ", max = " + mmi.max ); // find distribution of rwr score JobClient.runJob(configStage4(mmi.min, mmi.max)); System.out.println("\n[PEGASUS] RWR computed."); System.out.println("[PEGASUS] The final RWR scores are in the HDFS rwr_vector."); System.out.println("[PEGASUS] The minium and maximum scores are in the HDFS rwr_minmax."); System.out.println("[PEGASUS] The histogram of scores in 1000 bins are in the HDFS rwr_distr.\n"); return 0; } // generate initial rwr vector public void gen_initial_vector(long number_nodes, Path vector_path) throws IOException { int i, j = 0; int milestone = (int)number_nodes/10; String file_name = "rwr_init_vector.temp"; FileWriter file = new FileWriter(file_name); BufferedWriter out = new BufferedWriter (file); System.out.print("Creating initial rwr vectors..."); double initial_rank = 1.0 / (double)number_nodes; for(i=0; i < number_nodes; i++) { out.write(i + "\tv" + initial_rank +"\n"); if(++j > milestone) { System.out.print("."); j = 0; } } out.close(); System.out.println(""); // copy it to curbm_path, and delete temporary local file. fs.delete(vector_path, true); fs.copyFromLocalFile( true, new Path("./" + file_name), new Path (vector_path.toString()+ "/" + file_name) ); } // read neighborhood number after each iteration. public static MinMaxInfo readMinMax(String new_path) throws Exception { MinMaxInfo info = new MinMaxInfo(); String mv_output_path = new_path + "/part-00000"; String file_line = ""; try { BufferedReader in = new BufferedReader( new InputStreamReader(new FileInputStream( mv_output_path ), "UTF8")); // Read first line file_line = in.readLine(); // Read through file one line at time. 
Print line # and line while (file_line != null){ final String[] line = file_line.split("\t"); if(line[0].startsWith("0")) info.min = Double.parseDouble( line[1] ); else info.max = Double.parseDouble( line[1] ); file_line = in.readLine(); } in.close(); } catch (IOException e) { e.printStackTrace(); } return info;//result; } // Configure pass1 protected JobConf configStage1 () throws Exception { final JobConf conf = new JobConf(getConf(), RWRNaive.class); conf.set("number_nodes", "" + number_nodes); conf.set("mixing_c", "" + mixing_c); conf.set("make_symmetric", "" + make_symmetric); conf.setJobName("RWR_Stage1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); fs.delete(tempmv_path, true); FileInputFormat.setInputPaths(conf, edge_path, vector_path); FileOutputFormat.setOutputPath(conf, tempmv_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure pass2 protected JobConf configStage2 () throws Exception { final JobConf conf = new JobConf(getConf(), RWRNaive.class); conf.set("number_nodes", "" + number_nodes); conf.set("mixing_c", "" + mixing_c); conf.setJobName("RWR_Stage2"); conf.setMapperClass(MapStage2.class); conf.setReducerClass(RedStage2.class); fs.delete(mv_output_path, true); FileInputFormat.setInputPaths(conf, tempmv_path); FileOutputFormat.setOutputPath(conf, mv_output_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure pass3 protected JobConf configStage3 () throws Exception { final JobConf conf = new JobConf(getConf(), RWRNaive.class); conf.set("number_nodes", "" + number_nodes); conf.set("mixing_c", "" + mixing_c); conf.setJobName("RWR_Stage3" ); conf.setMapperClass(MapStage3.class); conf.setReducerClass(RedStage3.class); conf.setCombinerClass(RedStage3.class); fs.delete(minmax_path, true); FileInputFormat.setInputPaths(conf, vector_path); FileOutputFormat.setOutputPath(conf, minmax_path); conf.setNumReduceTasks( 1 ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(DoubleWritable.class); return conf; } // Configure pass4 protected JobConf configStage4 (double min_rwr, double max_rwr) throws Exception { final JobConf conf = new JobConf(getConf(), RWRNaive.class); conf.set("min_rwr", "" + min_rwr); conf.set("max_rwr", "" + max_rwr); conf.setJobName("RWR_Stage4"); conf.setMapperClass(MapStage4.class); conf.setReducerClass(RedStage4.class); conf.setCombinerClass(RedStage4.class); fs.delete(distr_path, true); FileInputFormat.setInputPaths(conf, vector_path); FileOutputFormat.setOutputPath(conf, distr_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(IntWritable.class); return conf; } } PEGASUS/src/pegasus/rwr/RWRBlock.java0000644000000000000000000004752311443145611016236 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: RWRBlock.java - RWR using block matrix-vector multiplication. Version: 2.0 ***********************************************************************/ package pegasus; import pegasus.matvec.*; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class RWRBlock extends Configured implements Tool { protected static double converge_threshold = 0.05; ////////////////////////////////////////////////////////////////////// // STAGE 1: Generate partial matrix-vector multiplication results. // Perform hash join using Vector.rowid == Matrix.colid. // - Input: edge_file, RWR vector // - Output: partial matrix-vector multiplication results. ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); if( line.length < 2 ) return; if( line.length == 2 ) { // vector. output.collect( new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) ); } else { // edge output.collect( new IntWritable(Integer.parseInt(line[1])), new Text(line[0] + "\t" + line[2]) ); } } } public static class RedStage1 extends MapReduceBase implements Reducer { protected int block_width; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); System.out.println("RedStage1: block_width=" + block_width); } public void reduce (final IntWritable key, final Iterator values, OutputCollector output, final Reporter reporter) throws IOException { int i; float vector_val = 0; ArrayList to_nodes_list = new ArrayList(); ArrayList to_val_list = new ArrayList(); ArrayList> vectorArr = null; // save vector ArrayList>> blockArr = new ArrayList>>(); // save blocks ArrayList blockRowArr = new ArrayList(); // save block rows(integer) while (values.hasNext()) { // vector: key=BLOCKID, value= (IN-BLOCK-INDEX VALUE)s // matrix: key=BLOCK-COL BLOCK-ROW, value=(IN-BLOCK-COL IN-BLOCK-ROW VALUE)s String line_text = values.next().toString(); final String[] line = line_text.split("\t"); if( line.length == 1 ) { // vector : VALUE String vector_str = line_text; char fc = vector_str.charAt(0); if( fc == 's' || fc == 'v' ) vector_str = vector_str.substring(1); vectorArr = GIMV.parseVectorVal(vector_str, Double.class); } else { // edge : ROWID VALUE blockArr.add( GIMV.parseBlockVal(line[1], Double.class) ); int block_row = Integer.parseInt(line[0]); blockRowArr.add( block_row ); } } int blockCount = blockArr.size(); if( vectorArr == null || blockCount == 0 ) // missing vector or block. 
return; // output 'self' block to check convergence Text self_output = GIMV.formatVectorElemOutput("s", vectorArr); output.collect(key, self_output ); // For every matrix block, join it with vector and output partial results Iterator>> blockArrIter = blockArr.iterator(); Iterator blockRowIter = blockRowArr.iterator(); while( blockArrIter.hasNext() ){ ArrayList> cur_block = blockArrIter.next(); int cur_block_row = blockRowIter.next(); // multiply cur_block and vectorArr. ArrayList> cur_mult_result = GIMV.multBlockVector( cur_block, vectorArr, block_width); String cur_block_output = "o"; if( cur_mult_result != null && cur_mult_result.size() > 0 ) { Iterator> cur_mult_result_iter = cur_mult_result.iterator(); while( cur_mult_result_iter.hasNext() ) { VectorElem elem = cur_mult_result_iter.next(); if( elem.val != 0 ) { if( cur_block_output != "o" ) cur_block_output += " "; cur_block_output += ("" + elem.row + " " + elem.val); } } // output the partial result of multiplication. if( cur_block_output.length() > 1 ) { output.collect(new IntWritable(cur_block_row), new Text(cur_block_output)); } } } } } ////////////////////////////////////////////////////////////////////// // PASS 2: merge partial multiplication results ////////////////////////////////////////////////////////////////////// public static class MapStage2 extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); IntWritable node_key = new IntWritable(Integer.parseInt(line[0])); output.collect(node_key, new Text(line[1]) ); } } public static class RedStage2 extends MapReduceBase implements Reducer { protected int block_width; double mixing_c = 0; int number_nodes = 1; int query_nodeid = -1; int query_blockrow = -1; int query_blockind = -1; public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); block_width = Integer.parseInt(job.get("block_width")); mixing_c = Double.parseDouble(job.get("mixing_c")); query_nodeid = Integer.parseInt(job.get("query_nodeid")); query_blockrow = (int)(query_nodeid / block_width); query_blockind = query_nodeid % block_width; System.out.println("RedStage2 : block_width=" + block_width + ", query_nodeid=" + query_nodeid); } public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { ArrayList> self_vector = null; int i; double [] out_vals = new double[block_width]; for(i=0; i < block_width; i++) out_vals[i] = 0; while (values.hasNext()) { String cur_str = values.next().toString(); if( cur_str.charAt(0) == 's' ) { self_vector = GIMV.parseVectorVal(cur_str.substring(1), Double.class); continue; } ArrayList> cur_vector = GIMV.parseVectorVal(cur_str.substring(1), Double.class); Iterator> vector_iter = cur_vector.iterator(); while( vector_iter.hasNext() ) { VectorElem v_elem = vector_iter.next(); out_vals[ v_elem.row ] += v_elem.val; } } // output updated RWR String out_str = "";//"v"; for(i = 0; i < block_width; i++) { if( out_vals[i] > 0 ) { if( out_str.length() >1 ) out_str += " "; out_vals[i] = out_vals[i] * mixing_c; out_str += ("" + i + " " + out_vals[i]) ; } } if( out_str.length() > 0 ) output.collect( key, new Text(out_str) ); } } ////////////////////////////////////////////////////////////////////// // PASS 2.5: unfold the converged block RWR results to plain format. // This is a map-only stage. 
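// ---------------------------------------------------------------------
// [Illustrative sketch, not part of the original source.] Unlike
// PagerankBlock.RedStage2, which writes every slot of a block (each slot
// receives the uniform random_coeff), the RWR merge above emits only the
// strictly positive entries: restart mass arrives separately through the
// query vector, so untouched slots stay zero and the block vectors remain
// sparse. A stand-in for merging one block's summed values:
public final class SparseBlockMergeSketch
{
	public static String merge(double[] sums, double c)
	{
		StringBuilder out = new StringBuilder();
		for (int i = 0; i < sums.length; i++) {
			if (sums[i] <= 0) continue;				// keep the block sparse
			if (out.length() > 0) out.append(' ');
			out.append(i).append(' ').append(c * sums[i]);
		}
		return out.toString();						// empty when nothing to emit
	}
}
// ---------------------------------------------------------------------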
// - Input: the converged block RWR vector // - Output: (node_id, "v"RWR_of_the_node) ////////////////////////////////////////////////////////////////////// public static class MapStage25 extends MapReduceBase implements Mapper { int block_width; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); System.out.println("MapStage25: block_width = " + block_width); } // input sample : //0 v0 0.11537637712698735 1 0.11537637712698735 public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); String[] tokens = null; if( line[1].charAt(0) == 's' ) tokens = line[1].substring(1).split(" "); else tokens = line[1].split(" "); int i; int block_id = Integer.parseInt(line[0] ); for(i = 0; i < tokens.length; i+=2) { int elem_row = Integer.parseInt(tokens[i]); double rwr_score = Double.parseDouble(tokens[i+1]); output.collect( new IntWritable(block_width * block_id + elem_row), new Text("v" + rwr_score) ); } } } ////////////////////////////////////////////////////////////////////// // STAGE 3: After finding RWR, calculate min/max RWR // - Input: The converged RWR vector // - Output: (key 0) minimum RWR, (key 1) maximum RWR ////////////////////////////////////////////////////////////////////// // We reuse MapStage3 and RedStage3 of RWRNaive.java ////////////////////////////////////////////////////////////////////// // STAGE 4 : Find distribution of RWR scores. // - Input: The converged RWR vector // - Output: The histogram of RWR vector in 1000 bins between min_RWR and max_RWR ////////////////////////////////////////////////////////////////////// // We reuse MapStage4 and RedStage4 of RWRNaive.java ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path edge_path = null; protected Path vector_path = null; protected Path new_vector_path = null; protected Path tempmv_path = null; protected Path mv_output_path = null; protected Path query_raw_path = null; protected Path query_path = new Path("rwr_query_norm"); protected Path query_block_path = new Path("rwr_query_norm_block"); protected Path diff_path = new Path("rwr_vector_difference"); protected Path vector_unfold_path = new Path("rwr_vector"); protected Path minmax_path = new Path("rwr_minmax"); protected Path distr_path = new Path("rwr_distr"); protected String local_output_path; protected long number_nodes = 0; protected int niteration = 32; protected double mixing_c = 0.85f; protected long query_nodeid = -1; protected int nreducers = 1; protected int block_width = 64; FileSystem fs; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new RWRBlock(), args); System.exit(result); } // Print the command-line usage text. 
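// ---------------------------------------------------------------------
// [Illustrative sketch, not part of the original source; it assumes that
// NormalizeVector rescales its input to sum to the weight it is given,
// which run() further below sets to (1.0 - mixing_c).] The driver first
// normalizes the raw query so the restart vector q carries exactly (1 - c)
// of the probability mass, i.e. q = (1 - c) * x / ||x||_1:
public final class QueryNormSketch
{
	public static double[] normalize(double[] x, double weight)
	{
		double sum = 0;
		for (double e : x) sum += Math.abs(e);		// ||x||_1
		double[] q = new double[x.length];
		for (int i = 0; i < x.length; i++)
			q[i] = weight * x[i] / sum;
		return q;
	}
}
// ---------------------------------------------------------------------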
protected static int printUsage () { System.out.println("RWRBlock <# of nodes> <# of reducers> "); ToolRunner.printGenericCommandUsage(System.out); return -1; } // y = y + ax public static Path SaxpyBlock(Configuration conf, int nreducer, Path py, Path px, Path out_path, double a, int block_width) throws Exception{ //System.out.println("Running Saxpy: py=" + py.getName() + ", px=" + px.getName() + ", a=" +a); String [] args = new String[5]; args[0] = new String("" + nreducer); args[1] = new String(py.getName()); args[2] = new String(px.getName() ); args[3] = new String("" + a); args[4] = new String("" + block_width); int saxpy_result = ToolRunner.run(conf, new SaxpyBlock(), args); //Path ret_path = null; FileSystem fs = FileSystem.get(conf); fs.delete(out_path, true); if( saxpy_result == 1 ) fs.rename(new Path("saxpy_output1"), out_path ); else fs.rename(new Path("saxpy_output"), out_path ); return out_path; } // submit the map/reduce job. public int run (final String[] args) throws Exception { if( args.length != 8 ) { return printUsage(); } int i; edge_path = new Path(args[0]); vector_path = new Path(args[1]); tempmv_path = new Path("rwr_tempmv_block"); mv_output_path = new Path("rwr_output_block"); new_vector_path = new Path("rwr_vector_new"); query_raw_path = new Path(args[2]); number_nodes = Long.parseLong(args[3]); nreducers = Integer.parseInt(args[4]); niteration = Integer.parseInt(args[5]); block_width = Integer.parseInt(args[6]); mixing_c = Double.parseDouble(args[7]); local_output_path = "rwr_output_temp"; System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing RWR using block method. Max iteration = " +niteration + ", threshold = " + converge_threshold + "\n"); fs = FileSystem.get(getConf()); // normalize query String []new_args = new String[4]; new_args[0] = args[2]; new_args[1] = "rwr_query_norm"; new_args[2] = "" + nreducers; new_args[3] = "" + (1.0 - mixing_c); ToolRunner.run(getConf(), new NormalizeVector(), new_args); // block-encode the query new_args = new String[7]; new_args[0] = "rwr_query_norm"; new_args[1] = "rwr_query_norm_block"; new_args[2] = "" + number_nodes; new_args[3] = "" + block_width; new_args[4] = "" + nreducers; new_args[5] = "null"; new_args[6] = "nosym"; ToolRunner.run(getConf(), new MatvecPrep(), new_args); // Iteratively calculate neighborhood function. for (i = 0; i < niteration; i++) { System.out.println("\n\nITERATION " + (i+1)); // v1 <- c*W*v JobClient.runJob(configStage1()); RunningJob job = JobClient.runJob(configStage2()); // v2 <- v1 + q SaxpyBlock( getConf(), nreducers, mv_output_path, query_block_path, new_vector_path, 1.0, block_width); // diff = || v2 - vector || SaxpyBlock( getConf(), nreducers, new_vector_path, vector_path, diff_path, -1.0, block_width); // compute l1 norm new_args = new String[2]; new_args[0] = diff_path.getName(); new_args[1] = "" + block_width; ToolRunner.run(getConf(), new L1normBlock(), new_args); double difference = PegasusUtils.read_l1norm_result(getConf()); FileSystem lfs = FileSystem.getLocal(getConf()); lfs.delete(new Path("l1norm"), true); System.out.println("difference = " + difference ); if( difference < converge_threshold ) { System.out.println("RWR vector converged. 
Now preparing to finish...");
            fs.delete(vector_path);
            fs.delete(tempmv_path);
            fs.rename(new_vector_path, vector_path);
            break;
        }

        // rotate directory
        fs.delete(vector_path);
        fs.delete(tempmv_path);
        fs.rename(new_vector_path, vector_path);
    }

    if( i == niteration ) {
        System.out.println("Reached the max iteration. Now preparing to finish...");
    }

    // unfold the block RWR to plain format
    System.out.println("Unfolding the block RWR to plain format...");
    JobClient.runJob(configStage25());

    // find min/max of RWR
    System.out.println("Finding minimum and maximum RWR scores...");
    JobClient.runJob(configStage3());

    FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path));

    String new_path = local_output_path + "/" ;
    fs.copyToLocalFile(minmax_path, new Path(new_path) ) ;
    MinMaxInfo mmi = PagerankNaive.readMinMax( new_path );
    System.out.println("min = " + mmi.min + ", max = " + mmi.max );

    // find distribution of RWR scores
    JobClient.runJob(configStage4(mmi.min, mmi.max));

    System.out.println("\n[PEGASUS] RWR computed.");
    System.out.println("[PEGASUS] The final RWR scores are in the HDFS rwr_vector.");
    System.out.println("[PEGASUS] The minimum and maximum RWRs are in the HDFS rwr_minmax.");
    System.out.println("[PEGASUS] The histogram of RWRs in 1000 bins between min_RWR and max_RWR is in the HDFS rwr_distr.\n");

    return 0;
}

// Configure pass1
protected JobConf configStage1 () throws Exception {
    final JobConf conf = new JobConf(getConf(), RWRBlock.class);
    conf.set("number_nodes", "" + number_nodes);
    conf.set("mixing_c", "" + mixing_c);
    conf.set("block_width", "" + block_width);
    conf.setJobName("RWRBlock_Stage1");

    conf.setMapperClass(MapStage1.class);
    conf.setReducerClass(RedStage1.class);

    fs.delete(tempmv_path, true);

    FileInputFormat.setInputPaths(conf, edge_path, vector_path);
    FileOutputFormat.setOutputPath(conf, tempmv_path);

    conf.setNumReduceTasks( nreducers );

    conf.setOutputKeyClass(IntWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}

// Configure pass2
protected JobConf configStage2 () throws Exception {
    final JobConf conf = new JobConf(getConf(), RWRBlock.class);
    conf.set("number_nodes", "" + number_nodes);
    conf.set("mixing_c", "" + mixing_c);
    conf.set("query_nodeid", "" + query_nodeid);
    conf.set("block_width", "" + block_width);
    conf.setJobName("RWRBlock_Stage2");

    conf.setMapperClass(MapStage2.class);
    conf.setReducerClass(RedStage2.class);

    fs.delete(mv_output_path, true);

    FileInputFormat.setInputPaths(conf, tempmv_path);
    FileOutputFormat.setOutputPath(conf, mv_output_path);

    conf.setNumReduceTasks( nreducers );

    conf.setOutputKeyClass(IntWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}

// Configure Stage25
protected JobConf configStage25() throws Exception {
    final JobConf conf = new JobConf(getConf(), ConCmptBlock.class);
    conf.set("block_width", "" + block_width);
    conf.setJobName("RWRBlock_Stage25");

    conf.setMapperClass(MapStage25.class);

    fs.delete(vector_unfold_path, true);

    FileInputFormat.setInputPaths(conf, vector_path);
    FileOutputFormat.setOutputPath(conf, vector_unfold_path);

    conf.setNumReduceTasks( 0 );        //This is essential for map-only tasks.
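    // (Editor's note, an addition for clarity) Setting the number of
    // reduce tasks to zero makes Hadoop run this job map-only: there
    // is no shuffle or sort phase, and each mapper's
    // (IntWritable, Text) records are written directly to part files
    // under vector_unfold_path. The essential pair of calls is just:
    //
    //     conf.setMapperClass(MapStage25.class);
    //     conf.setNumReduceTasks(0);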
conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure pass3 protected JobConf configStage3 () throws Exception { final JobConf conf = new JobConf(getConf(), PagerankNaive.class); conf.set("number_nodes", "" + number_nodes); conf.set("mixing_c", "" + mixing_c); conf.setJobName("RWRBlock_Stage3"); conf.setMapperClass(PagerankNaive.MapStage3.class); conf.setReducerClass(PagerankNaive.RedStage3.class); conf.setCombinerClass(PagerankNaive.RedStage3.class); fs.delete(minmax_path, true); FileInputFormat.setInputPaths(conf, vector_unfold_path); FileOutputFormat.setOutputPath(conf, minmax_path); conf.setNumReduceTasks( 1 ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(DoubleWritable.class); return conf; } // Configure pass4 protected JobConf configStage4 (double min_pr, double max_pr) throws Exception { final JobConf conf = new JobConf(getConf(), PagerankNaive.class); conf.set("min_pr", "" + min_pr); conf.set("max_pr", "" + max_pr); conf.setJobName("RWRBlock_Stage4" ); conf.setMapperClass(PagerankNaive.MapStage4.class); conf.setReducerClass(PagerankNaive.RedStage4.class); conf.setCombinerClass(PagerankNaive.RedStage4.class); fs.delete(distr_path, true); FileInputFormat.setInputPaths(conf, vector_unfold_path); FileOutputFormat.setOutputPath(conf, distr_path); conf.setNumReduceTasks( nreducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(IntWritable.class); return conf; } } PEGASUS/src/pegasus/hadi/0000755000000000000000000000000011443145611014026 5ustar rootrootPEGASUS/src/pegasus/hadi/Hadi.java0000644000000000000000000011310111443145611015533 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: Hadi.java - A main class for Hadi-plain. 
Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; class HadiResultInfo { public float nh; public int converged_nodes; public int changed_nodes; }; // Bit Shuffle Encoder/Decoder class BitShuffleCoder { // decode bitstrings public static int[] decode_bitmasks(String str, int K) { int i, j; int [] result = new int[K]; int cur_value; int fill_value = 1; int cumulated_value = 0; for(i = 0; i < K; i++) result[i] = 0; int [] byte_buffer = new int[4]; int byte_bufpos = 0; int cur_byte; byte [] str_bytes = str.getBytes(); for(i = 0; i < str_bytes.length; i += 2) { cur_byte = Integer.parseInt(str.substring(i, i+2), 16); if( (cur_byte & 0x80) != 0 ) { byte_buffer[byte_bufpos++] = cur_byte & 0x7F; cur_value = 0; for(j = 0; j < byte_bufpos; j++) { cur_value += ( byte_buffer[j] << (7*(byte_bufpos - 1 - j)) ); } // fill only one if( fill_value == 1 && cur_value > 0) fill_result( result, K, cur_value, cumulated_value ); cumulated_value += cur_value; fill_value = 1 - fill_value; byte_bufpos = 0; } else { byte_buffer[byte_bufpos++] = cur_byte & 0x7F; } } return result; } private static void fill_result(int[] result, int K, int cur_value, int cumulated_value) { int i, j; int start_i = cumulated_value / K ; // i : bit position of each bitmask int start_j = cumulated_value % K; // j : index of bitmask int count = 0; for(i = start_i; i < 32; i++) { if( i == start_i ) j = start_j; else j = 0; for(; j < K; j++) { result[j] |= (1 << (31-i)); if( ++count >= cur_value ) return; } } return ; } // encode bitmask public static String encode_bitmasks( int [] bm_array, int K ) { String result = ""; int i, j; byte prev_bit = -1; int cur_count = 0; byte cur_bit; int cur_mask; for(i = 0; i < 32; i++) { // i : bit position of each bitmask cur_mask = 1 << (31-i); for(j = 0; j < K; j++) { // j : index of bitmask if( (cur_mask & bm_array[j]) != 0 ) cur_bit = 1; else cur_bit = 0; if( prev_bit == -1 ) { if( cur_bit == 0 ) result += encode_value(0); // bit sequence start with 1. prev_bit = cur_bit; cur_count = 1; continue; } if( prev_bit == cur_bit ) { cur_count++; } else { result += encode_value (cur_count); prev_bit = cur_bit; cur_count = 1; } } } if( cur_count > 0 ) { result += encode_value(cur_count); } return result; } // encode bitmask public static String encode_bitmasks( long [] bm_array, int K ) { String result = ""; int i, j; byte prev_bit = -1; int cur_count = 0; byte cur_bit; long cur_mask; for(i = 0; i < 32; i++) { // i : bit position of each bitmask cur_mask = 1 << (31-i); for(j = 0; j < K; j++) { // j : index of bitmask if( (cur_mask & bm_array[j]) != 0 ) cur_bit = 1; else cur_bit = 0; if( prev_bit == -1 ) { if( cur_bit == 0 ) result += encode_value(0); // bit sequence start with 1. 
prev_bit = cur_bit; cur_count = 1; continue; } if( prev_bit == cur_bit ) { cur_count++; } else { // prev_bit != cur_bit result += encode_value (cur_count); prev_bit = cur_bit; cur_count = 1; } } } if( cur_count > 0 ) { result += encode_value(cur_count); } return result; } private static String encode_value(int number) { if( number == 0 ) { return "80"; } // find leftmost bit int i; int cur_mask; int result = 0; final int [] one_masks = { 0x7F, 0x3F80, 0x1FC000, 0xFE00000, 0xF0000000 }; for(i = 31; i >= 0; i--) { cur_mask = 1 << i; if( (cur_mask & number) != 0 ) break; } int nbytes = (int) Math.ceil( (float)(i+1)/ 7.0 ); for(i = 0; i < nbytes; i++) { if( i == 0 ) { result = (1 << 7 ) | (number & one_masks[0]); } else { int added_value = ((number & (one_masks[i]))) >> (7 * i); result |= (added_value << (8*i)); } } String temp_result = Integer.toHexString(result); if( temp_result.length() % 2 == 1 ) temp_result = "0" + temp_result; return temp_result; } }; // Flajolet - Martin bitmask(bitstring) class class FMBitmask { // generate K replicated bitmasks for one node public static String generate_bitmask(int number_node, int K, int encode_bitmask) { int i; int size_bitmask=32; String bitmask = "bsi0:0:1"; int bm_array[] = new int[K]; for(i=0; i= ninety_maxnh ) { bAboveThreshold = true; if( i > 0 ) { result = result + ":" + prev_hop + ":" + prev_nh; } } } if( bAboveThreshold ) { result = result + ":" + cur_hop + ":" + df.format(cur_nh); } prev_nh = cur_nh; prev_hop = cur_hop; } if( token.length > 0 && result.length() == 0 && cur_hop>0 ) { result = result + ":" + prev_hop + ":" + prev_nh; } result = result + ":" + cur_radius + ":" + df.format(max_nh); //System.out.println("[DEBUG] update_radhistory result=" + result); return result; } // calculate the effective diameter of a graph, given neighborhood results. public static float effective_diameter( float []N, int max_radius ) { float max_nh = N[ max_radius ]; int i; float threshold = max_nh * 0.9f; for(i=1; i <= max_radius; i++) { if( N[i] >= threshold ) { float decimal = (threshold - N[i-1])/(N[i] - N[i-1]) ; return (i - 1 + decimal); } } return -1; } // calculate the average diameter of a graph, given neighborhood results. public static float average_diameter( float []N, int max_radius ) { float min_nh = N[ 0 ]; float max_nh = N[ max_radius ]; int h; float sum = 0; for(h=1; h <= max_radius; h++) { sum += h * (N[h] - N[h-1]); } sum = sum / (max_nh - min_nh); return sum; } // read neighborhood number after each iteration. public static HadiResultInfo readNhoodOutput(String new_path) throws Exception { String output_path = new_path + "/part-00000"; String str = ""; try { BufferedReader in = new BufferedReader( new InputStreamReader(new FileInputStream( output_path ), "UTF8")); str = in.readLine(); } catch (UnsupportedEncodingException e) { } catch (IOException e) { } final String[] line = str.split("\t"); HadiResultInfo ri = new HadiResultInfo(); ri.nh = Float.parseFloat( line[1] ); ri.converged_nodes = Integer.parseInt( line[2] ); ri.changed_nodes = Integer.parseInt( line[3] ); return ri; } }; // Hadi Main Class public class Hadi extends Configured implements Tool { public static int MAX_ITERATIONS = 2048; public static float N[] = new float[MAX_ITERATIONS]; // save N(h) static int iter_counter = 0; ////////////////////////////////////////////////////////////////////// // STAGE 1: generate partial bitstrings. 
// - Input: edge_file, bitstrings_from_the_last_iteration(or, bitstring generation command) // - Output: partial bitstrings ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { int make_symmetric = 0; // Indicates whether to make reverse edges or not. public void configure(JobConf job) { make_symmetric = Integer.parseInt(job.get("make_symmetric")); System.out.println("MapStage1: make_symmetric = " + make_symmetric); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in the edge file return; final String[] line = line_text.split("\t"); if(line.length < 2 ) // ignore ill-formated data. return; if( line[1].startsWith("b") || // bitmask from previous iterations line[1].startsWith("c") ) { // bitmask creation command output.collect(new IntWritable(Integer.parseInt(line[0])), new Text(line[1])); } else { // (src, dst) edge int dst_nodeid = Integer.parseInt(line[1]); output.collect(new IntWritable(dst_nodeid), new Text(line[0])); // invert to and from if( make_symmetric == 1 ) { // make the reverse edge int src_nodeid = Integer.parseInt(line[0]); if( src_nodeid != dst_nodeid) output.collect(new IntWritable(src_nodeid), new Text(line[1])); } } } } public static class RedStage1 extends MapReduceBase implements Reducer { int number_nodes = 0; int nreplication = 0; int encode_bitmask = 0; public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); nreplication = Integer.parseInt(job.get("nreplication")); encode_bitmask = Integer.parseInt(job.get("encode_bitmask")); System.out.println("RedStage1: number_nodes = " + number_nodes + ", nreplication = " + nreplication + ", encode_bitmask="+encode_bitmask); } public void reduce (final IntWritable key, final Iterator values, OutputCollector output, final Reporter reporter) throws IOException { String bitmask = ""; Set src_nodes_set = new HashSet(); boolean self_contained = false; String cur_value=""; while (values.hasNext()) { cur_value = values.next().toString(); if (cur_value.startsWith("b")) { // bitmask line bitmask = cur_value; } else if (cur_value.startsWith("c")) { // bitmask create command line bitmask = FMBitmask.generate_bitmask( number_nodes, nreplication, encode_bitmask ); } else { // edge line int src_node_int = Integer.parseInt(cur_value); src_nodes_set.add( src_node_int ); if( key.get() == src_node_int) self_contained = true; } } if( self_contained == false ) // add self loop, if not exists. src_nodes_set.add(key.get()); char complete_prefix='x'; try { if( bitmask.charAt(2) == 'i' ) complete_prefix = 'i'; else complete_prefix = 'f'; } catch(Exception ex) { System.out.println("Exception at bitmask.charAt(2). 
bitmask=["+bitmask+"],key="+key.get()); } try { Iterator src_nodes_it = src_nodes_set.iterator(); while (src_nodes_it.hasNext()) { String bitmask_new; int cur_key_int = ((Integer)src_nodes_it.next()).intValue(); if( cur_key_int == key.get() ) { // partial bitmask from 'self' bitmask_new = "bs" + complete_prefix + bitmask.substring(3); output.collect(new IntWritable(cur_key_int), new Text(bitmask_new)); } else { // partial bitmask from 'others' bitmask_new = "bo" + complete_prefix + bitmask.substring(3); output.collect(new IntWritable(cur_key_int), new Text(bitmask_new)); } } } catch(Exception ex) { System.out.println("Exception at bitmask.substring(3). bitmask=["+bitmask+"],key="+key.get()); } } } //////////////////////////////////////////////////////////////////////////////////////////////// // STAGE 2: merge partial bitstrings. // - Input: partial bitstrings // - Output: combined bitstrings //////////////////////////////////////////////////////////////////////////////////////////////// public static class MapStage2 extends MapReduceBase implements Mapper { // Identity mapper public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); output.collect(new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) ); } } public static class RedStage2 extends MapReduceBase implements Reducer { int nreplication = 0; int encode_bitmask = 0; int cur_radius = 0; public void configure(JobConf job) { nreplication = Integer.parseInt(job.get("nreplication")); encode_bitmask = Integer.parseInt(job.get("encode_bitmask")); cur_radius = Integer.parseInt(job.get("cur_radius")); System.out.println("RedStage2: nreplication = " + nreplication + ", encode_bitmask = "+encode_bitmask +", cur_radius = " + cur_radius); } public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { long [] bitmask={0,}; long [] self_bitmask={0,}; int bitmask_len = -1; int i; String out_val ="bs"; boolean bSelfChanged = false; char complete_prefix = 'x'; String complete_bitstring = ""; boolean bSelf; String saved_self_prefix=""; while (values.hasNext()) { String cur_bm_string = values.next().toString(); String cur_value = ""; int bitmask_start_index = cur_bm_string.indexOf(' '); cur_value = cur_bm_string.substring(bitmask_start_index+1); if( cur_bm_string.charAt(1) == 's' ) { // current bitmask came from itself complete_prefix = cur_bm_string.charAt(2); bSelf = true; int sp_pos = cur_bm_string.indexOf(' '); saved_self_prefix = cur_bm_string.substring(2, sp_pos); } else // current bitmask came from adjacent nodes bSelf = false; if( bitmask_len == -1 ) { bitmask_len = nreplication; bitmask = new long[nreplication]; self_bitmask = new long[nreplication]; for(i= 0; i < nreplication; i++) bitmask[i] = 0; } // update bitmasks using OR operations if( encode_bitmask == 1 ) { int [] cur_mask = BitShuffleCoder.decode_bitmasks( cur_value, nreplication); for(i = 0; i < nreplication; i++) { bitmask[i] = (bitmask[i] | cur_mask[i]); if( bSelf == true ) self_bitmask[i] = cur_mask[i]; } } else { String [] str_bitmasks = cur_value.split(" "); for(i = 0; i < nreplication; i++) { long cur_mask = Long.parseLong( str_bitmasks[i], 16 ); bitmask[i] = (bitmask[i] | cur_mask); if( bSelf == true ) self_bitmask[i] = cur_mask; } } } // check whether the self bitmask didn't change. 
for(i = 0; i< nreplication; i++) { if( self_bitmask[i] != bitmask[i] ) { bSelfChanged = true; break; } } if( bSelfChanged == true ) { // if at least a bitmask changed if( saved_self_prefix.length() >= 1 ) { int colonPos = saved_self_prefix.indexOf(':'); out_val += ("i" + (cur_radius-1) + HadiUtils.update_radhistory(self_bitmask, saved_self_prefix.substring(colonPos+1), cur_radius, nreplication) );//out_val += "i"; } else out_val += ("i" + (cur_radius-1)); } else { // if all bitmasks didn't change if( complete_prefix == 'i' ) { out_val += ("c" + (cur_radius-1)) ; int colonPos = saved_self_prefix.indexOf(':'); if( colonPos >= 0 ) out_val += saved_self_prefix.substring(colonPos); } else // complete_prefix == 'c' or 'f' out_val += saved_self_prefix; // "f" + saved_radius } if(encode_bitmask == 1) out_val += ( " " + BitShuffleCoder.encode_bitmasks( bitmask, nreplication ) ); else { for(i = 0; i < nreplication; i++) out_val = out_val + " " + Long.toHexString(bitmask[i]) ; } output.collect(key, new Text( out_val ) ); } } public static class CombinerStage2 extends MapReduceBase implements Reducer { int nreplication = 0; int encode_bitmask = 0; public void configure(JobConf job) { nreplication = Integer.parseInt(job.get("nreplication")); encode_bitmask = Integer.parseInt(job.get("encode_bitmask")); System.out.println("CombinerStage2: nreplication = " + nreplication + ", encode_bitmask="+encode_bitmask); } public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { long [] bitmask={0,}; int bitmask_len = -1; int i; String out_val ="boi"; boolean bSelfChanged = false; char complete_prefix = 'x'; boolean bStopWhileLoop = false; while (values.hasNext()) { Text cur_value_text = values.next(); String cur_bm_string = cur_value_text.toString(); int bitmask_start_index = cur_bm_string.indexOf(' '); String cur_value = cur_bm_string.substring(bitmask_start_index+1); boolean bSelf; if( cur_bm_string.charAt(1) == 's' ) { // for calculating individual diameter output.collect(key, new Text(cur_value_text) ); continue; } if( bitmask_len == -1 ) { bitmask_len = nreplication; bitmask = new long[nreplication]; for(i= 0; i < nreplication; i++) bitmask[i] = 0; } // update bitmasks using OR operations if( encode_bitmask == 1 ) { int [] cur_mask = BitShuffleCoder.decode_bitmasks( cur_value, nreplication); for(i = 0; i < nreplication; i++) { bitmask[i] = (bitmask[i] | cur_mask[i]); } } else { String [] str_bitmasks = cur_value.split(" "); for(i = 0; i < str_bitmasks.length; i++) { long cur_mask = Long.parseLong( str_bitmasks[i], 16 ); bitmask[i] = (bitmask[i] | cur_mask); } } } // output partial bitmasks. if( bitmask_len != -1) { if( encode_bitmask == 1 ) out_val += ( " " + BitShuffleCoder.encode_bitmasks( bitmask, nreplication ) ); else { for(i = 0; i < nreplication; i++) out_val = out_val + " " + Long.toHexString(bitmask[i]) ; } output.collect(key, new Text( out_val ) ); } } } ////////////////////////////////////////////////////////////////////// // STAGE 3: Calculate N(h) and the number of changed nodes. 
// - Input: the converged bitstrings // - Output: Neighborhood(h) TAB number_of_converged_nodes TAB number_of_changed_nodes ////////////////////////////////////////////////////////////////////// public static class MapStage3 extends MapReduceBase implements Mapper { private final IntWritable zero_id = new IntWritable(0); private Text output_val; int nreplication = 0; int encode_bitmask = 0; public void configure(JobConf job) { nreplication = Integer.parseInt(job.get("nreplication")); encode_bitmask = Integer.parseInt(job.get("encode_bitmask")); System.out.println("MapStage3: nreplication = " + nreplication + ", encode_bitmask="+encode_bitmask); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { if (value.toString().startsWith("#")) // ignore comments line return; final String[] line = value.toString().split("\t"); char complete_prefix = line[1].charAt(2); int i; double avg_bitpos = 0; int converged_count = 0; int changed_count = 0; int bitmask_start_index = line[1].indexOf(' '); String bitmask_str = line[1].substring(bitmask_start_index+1); if( encode_bitmask == 1 ) { int [] bitmask = BitShuffleCoder.decode_bitmasks( bitmask_str, nreplication ); for(i = 0; i < nreplication; i++) avg_bitpos += (double) FMBitmask.find_least_zero_pos( bitmask[i] ); } else { String [] bitmasks = bitmask_str.split(" "); for(i = 0; i < bitmasks.length; i++) avg_bitpos += (double) FMBitmask.find_least_zero_pos( Long.parseLong( bitmasks[i], 16 ) ); } avg_bitpos = avg_bitpos / nreplication; if( complete_prefix == 'c') converged_count = 1; if( complete_prefix == 'i') changed_count = 1; output_val = new Text( Double.toString(Math.pow(2, avg_bitpos)/0.77351 ) + "\t" + converged_count + "\t" + changed_count); output.collect(zero_id, output_val); } } public static class RedStage3 extends MapReduceBase implements Reducer { private Text output_val; public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { double nh_sum = 0.0f; // N(h) int converged_sum = 0; // number of converged nodes at this iteration int changed_sum = 0; // number of changed nodes while (values.hasNext()) { final String[] line = values.next().toString().split("\t"); nh_sum += Double.parseDouble(line[0]); converged_sum += Integer.parseInt(line[1]); changed_sum += Integer.parseInt(line[2]); } output_val = new Text( Double.toString(nh_sum) + "\t" + Integer.toString(converged_sum) + "\t" + Integer.toString(changed_sum) ); output.collect(key, output_val); } } ////////////////////////////////////////////////////////////////////// // STAGE 4: Calculate the effective radii of nodes, after the bitstrings converged. // This is a map-only stage. // - Input: the converged bitstrings // - Output: (node_id, "bsf"max_radius:eff_radius) ////////////////////////////////////////////////////////////////////// public static class MapStage4 extends MapReduceBase implements Mapper { // input sample : // 0 bsi1:1:1.8:2:2.6 8f81878... 
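// (Editor's worked example for the interpolation below) For the
// sample line above, "bsi1:1:1.8:2:2.6 ...", the radius history is
// hop 1 -> N=1.8 and hop 2 -> N=2.6, so max_radius = 2, max_nh = 2.6
// and ninety_th = 0.9 * 2.6 = 2.34. Hop 1 falls below the threshold,
// so the effective radius is interpolated between hops 1 and 2:
//     eff_radius = 1 + (2.34 - 1.8) / (2.6 - 1.8) = 1.675
// which is emitted as "bsf2:1.68".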
public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); final String[] tokens = line[1].split(" "); int max_radius = 0; double eff_radius=0;//int eff_radius = 0; double eff_nh = 0; String radius_str = tokens[0].substring(3) ; if( radius_str.length() > 0 ) { String[] radius_info = radius_str.split(":"); if( radius_info.length > 1 ) { max_radius = Integer.parseInt( radius_info[ radius_info.length -2] ); eff_radius = max_radius; double max_nh = Double.parseDouble( radius_info[ radius_info.length -1] ); eff_nh = max_nh; double ninety_th = max_nh * 0.9; for(int i = radius_info.length -4; i >=0; i -= 2) { int cur_hop = Integer.parseInt( radius_info[i] ); double cur_nh = Double.parseDouble( radius_info[i+1] ); if( cur_nh >= ninety_th ) { eff_radius = cur_hop; eff_nh = cur_nh; } else { eff_radius = cur_hop + (double)(ninety_th - cur_nh)/(eff_nh - cur_nh); break; } } } DecimalFormat df = new DecimalFormat("#.##"); output.collect( new IntWritable(Integer.parseInt(line[0])), new Text("bsf" + max_radius + ":" + df.format(eff_radius)) ); } } } ////////////////////////////////////////////////////////////////////// // STAGE 5: Summarize radii information // - Input: current bitstrings // - Output: effective_radius TAB number_of_nodes_with_such_radius ////////////////////////////////////////////////////////////////////// public static class MapStage5 extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); final String[] tokens = line[1].split(" "); String radius_str = tokens[0].substring(3) ; if( radius_str.length() > 0 ) { String[] radius_info = radius_str.split(":"); double eff_radius = Double.parseDouble(radius_info[1]); output.collect( new IntWritable((int)Math.round(eff_radius)), new IntWritable(1) ); } } } public static class RedStage5 extends MapReduceBase implements Reducer { public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int sum = 0; while (values.hasNext()) { int cur_count = values.next().get(); sum += cur_count; } output.collect(key, new IntWritable(sum)); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path edge_path = null; protected Path curbm_path = null; protected Path tempbm_path = null; protected Path nextbm_path = null; protected Path output_path = null; protected Path radius_path = null; protected Path radius_summary_path = null; protected String local_output_path; protected int number_nodes = 0; protected int nreplication = 0; protected int nreducer = 1; enum EdgeType { Regular, Inverted }; protected EdgeType edge_type; protected int encode_bitmask = 0; protected int cur_radius = 1; protected int start_from_newbm = 0; protected int resume_from_radius = 0; protected int make_symmetric = 0; // convert directed graph to undirected graph // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new Hadi(), args); System.exit(result); } // Print the usage text. 
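// ------------------------------------------------------------------
// Editor's sketch (not part of the original sources): the
// Flajolet-Martin estimate used by MapStage3 above. With b the
// position of the least-significant zero bit, averaged over the K
// replicated bitmasks, the neighborhood size is estimated as
// 2^b / 0.77351. A stand-alone version, reusing
// FMBitmask.find_least_zero_pos() from this package, would be:
public static double fmEstimate(long[] bitmasks) {
    double avg_bitpos = 0;
    for (long bm : bitmasks)
        avg_bitpos += (double) FMBitmask.find_least_zero_pos(bm);
    avg_bitpos /= bitmasks.length;
    return Math.pow(2, avg_bitpos) / 0.77351;   // FM bias correction
}
// ------------------------------------------------------------------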
protected static int printUsage () { System.out.println("hadi <# of vertices> <# of replication> <# of reducers> <'max' or maximum_iteration>"); ToolRunner.printGenericCommandUsage(System.out); return -1; } public int run (final String[] args) throws Exception { int i; int max_iteration = MAX_ITERATIONS; if( args.length != 12 ) { return printUsage(); } edge_path = new Path(args[0]); curbm_path = new Path(args[1]); tempbm_path = new Path(args[2]); nextbm_path = new Path(args[3]); output_path = new Path(args[4]); number_nodes = Integer.parseInt(args[5]); radius_path = new Path("hadi_radius"); radius_summary_path = new Path("hadi_radius_summary"); nreplication = Integer.parseInt(args[6]); nreducer = Integer.parseInt(args[7]); if( args[8].compareTo("enc") == 0 ) encode_bitmask = 1; if( args[9].compareTo("newbm") == 0 ) { start_from_newbm = 1; } else if( args[9].startsWith("cont") ){ start_from_newbm = 0; cur_radius = Integer.parseInt(args[9].substring(4)); } if( args[10].compareTo("makesym") == 0 ) make_symmetric = 1; else make_symmetric = 0; if( args[11].compareTo("max") != 0 ) max_iteration = Integer.parseInt(args[11]); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing Radii/Diameter. Current hop: " + cur_radius + ", edge_path: " + args[0] + ", encode: " + encode_bitmask + ", # reducers: " + nreducer + ", makesym: " + make_symmetric + ", max_iteration: " + max_iteration + "\n"); local_output_path = args[4] + number_nodes + "_temp"; if( start_from_newbm == 1 ) { System.out.print("Generating initial bitstrings for " + number_nodes + " nodes "); // create bitmask generate command file, and copy to curbm_path gen_bitmask_cmd_file(number_nodes, nreplication, curbm_path); System.out.println(" done"); } else { System.out.println("Resuming from current hadi_curbm which contains up to N(" + (cur_radius -1) + ")"); } N[0] = number_nodes; boolean eff_diameter_computed = false; // Iteratively run Stage1 to Stage3. for (i = cur_radius; i <= max_iteration; i++) { JobClient.runJob(configStage1(edge_type)); JobClient.runJob(configStage2()); JobClient.runJob(configStage3()); FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path)); final FileSystem fs = FileSystem.get(getConf()); // copy neighborhood information from HDFS to local disk, and read it! String new_path = local_output_path + "/" + i; fs.copyToLocalFile(output_path, new Path(new_path) ) ; HadiResultInfo ri = HadiUtils.readNhoodOutput(new_path); N[i] = ri.nh; iter_counter++; System.out.println("Nh(" + i + "):\t" + N[i] + "\tGuessed Radius(" + (i-1) + "):\t" + ri.converged_nodes ); // Stop when all radii converged. if( ri.changed_nodes == 0 ) {//if( i > 1 && N[i] == N[i-1] ) { System.out.println("All the bitstrings converged. Finishing..."); fs.delete(curbm_path); fs.delete(tempbm_path); fs.rename(nextbm_path, curbm_path); System.out.println("Calculating the effective diameter..."); JobClient.runJob(configStage4()); eff_diameter_computed = true; break; } // rotate directory. 
        fs.delete(curbm_path);
        fs.delete(tempbm_path);
        if(i < MAX_ITERATIONS - 1 )
            fs.delete(output_path);
        fs.rename(nextbm_path, curbm_path);

        cur_radius++;
    }

    if( eff_diameter_computed == false ) {
        System.out.println("Calculating the effective diameter...");
        JobClient.runJob(configStage4());
    }

    // Summarize Radius Information
    System.out.println("Summarizing radius information...");
    JobClient.runJob(configStage5());

    FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path));

    // print summary information
    if( i > max_iteration )
        System.out.println("Reached Max Iteration " + max_iteration);
    System.out.println("Total Iteration = " + iter_counter + ".");

    System.out.println("Neighborhood Summary:");
    for(int j = 0; j <= (i); j++)
        System.out.println("\tNh(" + (j) + "):\t" + N[j]);

    System.out.println("\n[PEGASUS] Radii and diameter computed.");
    System.out.println("[PEGASUS] Maximum diameter: " + (cur_radius - 1) );
    System.out.println("[PEGASUS] Average diameter: " + HadiUtils.average_diameter(N, cur_radius - 1) );
    System.out.println("[PEGASUS] 90% Effective diameter: " + HadiUtils.effective_diameter(N, cur_radius-1) );
    System.out.println("[PEGASUS] Radii are saved in the HDFS " + radius_path.getName() );
    System.out.println("[PEGASUS] Radii summary is saved in the HDFS " + radius_summary_path.getName() + "\n");

    return 0;
}

// generate bitmask command file which is used in the 1st iteration.
public void gen_bitmask_cmd_file(int number_nodes, int nreplication, Path curbm_path) throws IOException {
    int start_pos = 0;
    int i;
    int max_filesize = 10000000;

    for(i=0; i < number_nodes; i+=max_filesize) {
        int len=max_filesize;
        if(len > number_nodes-i)
            len = number_nodes - i;
        gen_bitmask_cmd_file(number_nodes, i, len, nreplication, curbm_path);
    }
}

// generate a part of the bitmask command file
public void gen_bitmask_cmd_file(int number_nodes, int start_pos,int len, int nreplication, Path curbm_path) throws IOException {
    // generate a temporary local bitmask command file
    int i, j = 0, threshold = 0, count=0;
    String file_name = "bitmask_cmd.hadi."+number_nodes+"."+start_pos;
    FileWriter file = new FileWriter(file_name);
    BufferedWriter out = new BufferedWriter (file);

    out.write("# bitmask command file for HADI\n");
    out.write("# number of nodes in graph = " + number_nodes+", start_pos="+start_pos+"\n");
    System.out.println("creating bitmask generation cmd for node " + start_pos + " ~ " + (start_pos+len));

    for(i=0; i < number_nodes; i++) {
        int cur_nodeid = start_pos + i;
        out.write(cur_nodeid + "\tc\n");
        if(++j > len/10) {
            System.out.print(".");
            j = 0;
        }
        if(++count >= len)
            break;
    }
    out.close();
    System.out.println("");

    // copy it to curbm_path, and delete temporary local file.
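    // (Editor's illustration) For number_nodes = 5 and start_pos = 0
    // the file written above contains, tab-separated:
    //     # bitmask command file for HADI
    //     # number of nodes in graph = 5, start_pos=0
    //     0   c
    //     1   c
    //     ...
    //     4   c
    // Each "c" line is a bitmask-creation command that RedStage1
    // expands into a fresh Flajolet-Martin bitstring in iteration 1.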
final FileSystem fs = FileSystem.get(getConf()); fs.copyFromLocalFile( true, new Path("./" + file_name), new Path (curbm_path.toString()+ "/" + file_name) ); } // Configure Stage1 protected JobConf configStage1(EdgeType edgeType) throws Exception { final JobConf conf = new JobConf(getConf(), Hadi.class); conf.set("number_nodes", "" + number_nodes); conf.set("nreplication", "" + nreplication); conf.set("encode_bitmask", "" + encode_bitmask); conf.set("make_symmetric", "" + make_symmetric); conf.setJobName("HADI_Stage1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); FileInputFormat.setInputPaths(conf, edge_path, curbm_path); FileOutputFormat.setOutputPath(conf, tempbm_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure Stage2 protected JobConf configStage2 () throws Exception { final JobConf conf = new JobConf(getConf(), Hadi.class); conf.set("nreplication", "" + nreplication); conf.set("encode_bitmask", "" + encode_bitmask); conf.set("cur_radius", "" + cur_radius); conf.setJobName("HADI_Stage2" ); conf.setMapperClass(MapStage2.class); conf.setReducerClass(RedStage2.class); conf.setCombinerClass(CombinerStage2.class); FileInputFormat.setInputPaths(conf, tempbm_path); FileOutputFormat.setOutputPath(conf, nextbm_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure Stage3 protected JobConf configStage3 () throws Exception { final JobConf conf = new JobConf(getConf(), Hadi.class); conf.set("nreplication", "" + nreplication); conf.set("encode_bitmask", "" + encode_bitmask); conf.setJobName("HADI_Stage3"); conf.setMapperClass(MapStage3.class); conf.setReducerClass(RedStage3.class); conf.setCombinerClass(RedStage3.class); FileInputFormat.setInputPaths(conf, nextbm_path); FileOutputFormat.setOutputPath(conf, output_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure Stage4 protected JobConf configStage4 () throws Exception { final JobConf conf = new JobConf(getConf(), Hadi.class); conf.setJobName("HADI_Stage4"); conf.setMapperClass(MapStage4.class); FileInputFormat.setInputPaths(conf, curbm_path); FileOutputFormat.setOutputPath(conf, radius_path); conf.setNumReduceTasks( 0 ); //This is essential for map-only tasks. conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure Stage5 protected JobConf configStage5 () throws Exception { final JobConf conf = new JobConf(getConf(), Hadi.class); conf.setJobName("HADI_Stage5"); conf.setMapperClass(MapStage5.class); conf.setReducerClass(RedStage5.class); conf.setCombinerClass(RedStage5.class); FileInputFormat.setInputPaths(conf, radius_path); FileOutputFormat.setOutputPath(conf, radius_summary_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(IntWritable.class); return conf; } } PEGASUS/src/pegasus/hadi/HadiIVGen.java0000644000000000000000000001757711443145611016450 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: HadiIVGen.java - generate initial bitstrings for HADI-BLOCK Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class HadiIVGen extends Configured implements Tool { ////////////////////////////////////////////////////////////////////// // STAGE 1: Read bitstring generation command, and generate bitstrings. // - Input: bitstring generation command // - Output: nodeid TAB FM bitstring ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); if(line.length < 3) return; output.collect( new IntWritable(Integer.parseInt(line[0])), new Text(line[1] + "\t" + line[2]) ); } } public static class RedStage1 extends MapReduceBase implements Reducer { int number_nodes = 0; int nreplication = 0; int encode_bitmask = 0; public void configure(JobConf job) { number_nodes = Integer.parseInt(job.get("number_nodes")); nreplication = Integer.parseInt(job.get("nreplication")); encode_bitmask = Integer.parseInt(job.get("encode_bitmask")); System.out.println("RedStage1 : number_nodes = " + number_nodes + ", nreplication = " + nreplication + ", encode_bitmask = " + encode_bitmask); } public void reduce (final IntWritable key, final Iterator values, OutputCollector output, final Reporter reporter) throws IOException { int start_node, end_node; while (values.hasNext()) { Text cur_text = values.next(); final String[] line = cur_text.toString().split("\t"); start_node = Integer.parseInt(line[0]); end_node = Integer.parseInt(line[1]); for(int i = start_node; i <= end_node; i++) { String new_bitmask = generate_bitmask(number_nodes, nreplication); output.collect( new IntWritable(i), new Text(new_bitmask) ); } } } // generate K replicated bitmasks for one node public String generate_bitmask(int number_node, int K) { int i; int size_bitmask = (int) Math.ceil( Math.log(number_node) / Math.log(2) ); String bitmask = "vi0:0:1"; // v is the initial prefix for every vector. i means 'incomplete' int bm_array[] = new int[K]; for(i=0; i <# of nodes> <# of reducers> "); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. 
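// ------------------------------------------------------------------
// Editor's sketch (not part of the original sources): RedStage1's
// generate_bitmask() above sizes each Flajolet-Martin bitmask as
// ceil(log2(number_node)) bits. The same computation in isolation:
public static int fmBitmaskSize(int number_node) {
    return (int) Math.ceil(Math.log(number_node) / Math.log(2));
}
// e.g. fmBitmaskSize(1000000) == 20: a graph of one million nodes
// needs 20-bit masks.
// ------------------------------------------------------------------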
public int run (final String[] args) throws Exception { if( args.length != 5 ) { return printUsage(); } output_path = new Path(args[0]); String input_path_name = "hadi_ivcmd" + args[0].substring(args[0].length()-1); input_path = new Path(input_path_name); number_nodes = Integer.parseInt(args[1]); number_reducers = Integer.parseInt(args[2]); nreplication = Integer.parseInt(args[3]); if( args[4].compareTo("enc") == 0 ) encode_bitmask = 1; else encode_bitmask = 0; System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Generating initial bistring vector. Output path = " + args[0] + ", number of nodes = " + number_nodes + ", number of reducers =" + number_reducers + ", nreplication=" + nreplication + ", encode_bitmask = " + encode_bitmask + "\n"); // Generate command file and copy to HDFS "input_ConCmptIVGen" gen_cmd_file(number_nodes, number_reducers, nreplication, input_path); // run job JobClient.runJob(configStage1()); fs = FileSystem.get(getConf()); fs.delete(input_path); System.out.println("\n[PEGASUS] Initial bistring vector for HADI generated in HDFS " + args[0] + "\n"); return 0; } // generate bitmask command file which is used in the 1st iteration. public void gen_cmd_file(int num_nodes, int num_reducers, int nreplication, Path input_path) throws IOException { // generate a temporary local command file int i; String file_name = "hadi_iv.temp"; FileWriter file = new FileWriter(file_name); BufferedWriter out = new BufferedWriter (file); out.write("# component vector file from HadiIVGen\n"); out.write("# number of nodes in graph = " + number_nodes + "\n"); System.out.print("creating initial vector generation cmd..."); int step = num_nodes/num_reducers; int start_node, end_node; for(i=0; i < num_reducers; i++) { start_node = i * step; if( i < num_reducers-1) end_node = step*(i+1) - 1; else end_node = num_nodes - 1; out.write(i + "\t" + start_node + "\t" + end_node + "\n"); } out.close(); System.out.println("done."); // copy it to curbm_path, and delete temporary local file. final FileSystem fs = FileSystem.get(getConf()); fs.copyFromLocalFile( true, new Path("./" + file_name), new Path (input_path.toString()+ "/" + file_name) ); } // Configure pass1 protected JobConf configStage1() throws Exception { final JobConf conf = new JobConf(getConf(), HadiIVGen.class); conf.set("number_nodes", "" + number_nodes); conf.set("nreplication", "" + nreplication); conf.set("encode_bitmask", "" + encode_bitmask); conf.setJobName("HadiIVGen_pass1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); FileInputFormat.setInputPaths(conf, input_path); FileOutputFormat.setOutputPath(conf, output_path); conf.setNumReduceTasks( number_reducers ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } } PEGASUS/src/pegasus/hadi/HadiBlock.java0000644000000000000000000005747611443145611016534 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: HadiBlock.java - A main class for Hadi-block. Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; // HadiBlock Main Class public class HadiBlock extends Configured implements Tool { public static int MAX_ITERATIONS = 2048; public static float N[] = new float[MAX_ITERATIONS]; // save N(h) static int iter_counter = 0; ////////////////////////////////////////////////////////////////////// // STAGE 1: generate partial block-bitstring. // Hash-join edge and vector by Vector.BLOCKROWID == Edge.BLOCKCOLID where // vector: key=BLOCKID, value= msu (IN-BLOCK-INDEX VALUE)s // moc // edge: key=BLOCK-ROW BLOCK-COL, value=(IN-BLOCK-ROW IN-BLOCK-COL VALUE)s // - Input: edge_file, bitstrings_from_the_last_iteration // - Output: partial bitstrings ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in the edge file return; final String[] line = line_text.split("\t"); if(line.length < 2 ) return; if( line.length == 2 ) { // vector IntWritable node_key = new IntWritable( Integer.parseInt(line[0]) ); output.collect( node_key, new Text(line[1]) ); } else { // edge. line.length = 3. 
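// (Editor's note) Two tab-separated line shapes arrive here, per the
// stage comment above: a vector line has two fields
// (BLOCKID, payload) and takes the branch above, while an edge line
// has three fields (BLOCK-ROW, BLOCK-COL, payload) and is re-keyed
// below by its block-column id, so that the reducer can hash-join
// each vector block with every matrix block whose column range
// multiplies it.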
IntWritable node_key = new IntWritable( Integer.parseInt(line[1]) ); output.collect( node_key, new Text(line[0] + "\t" + line[2]) ); } } } public static class RedStage1 extends MapReduceBase implements Reducer { int nreplication = 0; int encode_bitmask = 0; int block_width = 0; public void configure(JobConf job) { nreplication = Integer.parseInt(job.get("nreplication")); encode_bitmask = Integer.parseInt(job.get("encode_bitmask")); block_width = Integer.parseInt(job.get("block_width")); System.out.println("RedStage1: nreplication = " + nreplication + ", encode_bitmask="+encode_bitmask+", block_width=" + block_width); } public void reduce (final IntWritable key, final Iterator values, OutputCollector output, final Reporter reporter) throws IOException { ArrayList> vectorArr = null; // save vector ArrayList>> blockArr = new ArrayList>>(); // save blocks ArrayList blockRowArr = new ArrayList(); // save block rows(integer) while (values.hasNext()) { // vector: key=BLOCKID, value= (IN-BLOCK-INDEX VALUE)s // edge: key=BLOCK-COLID BLOCK-ROWID, value=(IN-BLOCK-COL IN-BLOCK-ROW VALUE)s String line_text = values.next().toString(); final String[] line = line_text.split("\t"); if( line.length == 1 ) { // vector : VALUE vectorArr = GIMV.parseHADIVector(line_text); } else { // edge : BLOCK-ROWID VALUE blockArr.add( GIMV.parseBlockVal(line[1], Integer.class) ); int block_row = Integer.parseInt(line[0]); blockRowArr.add( block_row ); } } if( vectorArr == null) return; // output 'self' block to check convergence Text self_output = GIMV.formatHADIVectorElemOutput("s", vectorArr); output.collect(key, self_output); // For every matrix block, join it with vector and output partial results Iterator>> blockArrIter = blockArr.iterator(); Iterator blockRowIter = blockRowArr.iterator(); int block_col_id = key.get(); while( blockArrIter.hasNext() ){ ArrayList> cur_block = blockArrIter.next(); int cur_block_row = blockRowIter.next(); ArrayList> cur_mult_result = GIMV.bworBlockVector( cur_block, vectorArr, block_width, nreplication, encode_bitmask); if( cur_mult_result.size() > 0 ) { Text partial_output = GIMV.formatVectorElemOutput("o", cur_mult_result); output.collect(new IntWritable(cur_block_row), partial_output); } } } } //////////////////////////////////////////////////////////////////////////////////////////////// // STAGE 2: merge partial bitstrings. 
// - Input: partial bitstrings // - Output: combined bitstrings //////////////////////////////////////////////////////////////////////////////////////////////// public static class MapStage2 extends MapReduceBase implements Mapper { // Identity mapper public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); output.collect(new IntWritable(Integer.parseInt(line[0])), new Text(line[1]) ); } } public static class RedStage2 extends MapReduceBase implements Reducer { int nreplication = 0; int encode_bitmask = 0; int cur_radius = 0; int block_width; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); nreplication = Integer.parseInt(job.get("nreplication")); encode_bitmask = Integer.parseInt(job.get("encode_bitmask")); cur_radius = Integer.parseInt(job.get("cur_radius")); System.out.println("RedStage2: block_width = " + block_width + ", nreplication = " + nreplication + ", encode_bitmask = "+encode_bitmask +", cur_radius = " + cur_radius); } public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int i,j; long [][] self_bm = new long[block_width][nreplication]; long [][] out_vals = new long[block_width][nreplication]; char [] prefix = new char[block_width]; String [] saved_rad_nh= new String[block_width]; for(i=0; i < block_width; i++) for(j=0; j < nreplication; j++) out_vals[i][j] = 0; while (values.hasNext()) { String cur_str = values.next().toString(); ArrayList> cur_vector = GIMV.parseHADIVector(cur_str); Iterator> vector_iter = cur_vector.iterator(); j = 0; while( vector_iter.hasNext() ) { VectorElem v_elem = vector_iter.next(); out_vals[ v_elem.row ] = GIMV.updateHADIBitString( out_vals[v_elem.row], v_elem.val, nreplication, encode_bitmask ); if( cur_str.charAt(0) == 's' ) { self_bm[ v_elem.row ] = GIMV.parseHADIBitString( v_elem.val, nreplication, encode_bitmask ); prefix[j] = v_elem.val.charAt(0); int tindex = v_elem.val.indexOf('~'); if( tindex >= 2 ) saved_rad_nh[j] = v_elem.val.substring(1, tindex); j++; } } } ArrayList> new_vector = GIMV.makeHADIBitString( out_vals, block_width, self_bm, prefix, saved_rad_nh, nreplication, cur_radius, encode_bitmask ); output.collect(key, GIMV.formatVectorElemOutput("s", new_vector) ); } } public static class CombinerStage2 extends MapReduceBase implements Reducer { int nreplication = 0; int encode_bitmask = 0; int cur_radius = 0; int block_width; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); nreplication = Integer.parseInt(job.get("nreplication")); encode_bitmask = Integer.parseInt(job.get("encode_bitmask")); cur_radius = Integer.parseInt(job.get("cur_radius")); System.out.println("CombinerStage2: block_width = " + block_width + ", nreplication = " + nreplication + ", encode_bitmask = "+encode_bitmask +", cur_radius = " + cur_radius); } public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int i,j; long [][] out_vals = new long[block_width][nreplication]; int bs_count = 0; for(i=0; i < block_width; i++) for(j=0; j < nreplication; j++) out_vals[i][j] = 0; while (values.hasNext()) { String cur_str = values.next().toString(); ArrayList> cur_vector = GIMV.parseHADIVector(cur_str); if( cur_str.charAt(0) == 's' ) { output.collect(key, new Text(cur_str) ); continue; } 
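// (Editor's note) Pre-merging partial bitmasks in the combiner is
// safe because bitwise OR is associative, commutative and
// idempotent:
//     (a | b) | c == a | (b | c)     and     a | a == a
// so OR-ing any subset of the partial results early cannot change
// what RedStage2 finally computes. 'self' bitstrings are passed
// through verbatim above, since RedStage2 needs them unmerged to
// detect convergence.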
Iterator> vector_iter = cur_vector.iterator(); j = 0; while( vector_iter.hasNext() ) { VectorElem v_elem = vector_iter.next(); out_vals[ v_elem.row ] = GIMV.updateHADIBitString( out_vals[v_elem.row], v_elem.val, nreplication, encode_bitmask ); } bs_count++; } if( bs_count > 0 ) { ArrayList> new_vector = GIMV.makeHADICombinerBitString( out_vals, block_width, nreplication, cur_radius, encode_bitmask ); output.collect(key, GIMV.formatVectorElemOutput("o", new_vector) ); } } } ////////////////////////////////////////////////////////////////////// // STAGE 3: Calculate N(h) and the number of changed nodes. // - Input: the converged bitstrings // - Output: Neighborhood(h) TAB number_of_converged_nodes TAB number_of_changed_nodes ////////////////////////////////////////////////////////////////////// public static class MapStage3 extends MapReduceBase implements Mapper { private final IntWritable zero_id = new IntWritable(0); private Text output_val; int nreplication = 0; int encode_bitmask = 0; public void configure(JobConf job) { nreplication = Integer.parseInt(job.get("nreplication")); encode_bitmask = Integer.parseInt(job.get("encode_bitmask")); System.out.println("MapStage3 : nreplication = " + nreplication + ", encode_bitmask="+encode_bitmask); } // input sample : // 1 s0 c4:4:7.5~e0000000~c0000000... 1 f2:2:3.7~e0000000~c0000000... public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { int i,j; String[] line = value.toString().split("\t"); String[] tokens = line[1].substring(1).split(" "); double sum_nh = 0; int converged_count = 0; int changed_count = 0; for(i = 0; i < tokens.length; i += 2 ) { String cur_elem = tokens[i+1]; if( cur_elem.charAt(0) == 'c' ) converged_count++; if( cur_elem.charAt(0) == 'i' ) changed_count++; double avg_bitpos = 0; if( encode_bitmask == 1 ) { int bitmask_start_index = cur_elem.indexOf('~'); String bitmask_str = cur_elem.substring(bitmask_start_index+1); int [] bitmask = BitShuffleCoder.decode_bitmasks( bitmask_str, nreplication ); for(j = 0; j < nreplication; j++) avg_bitpos += (double) FMBitmask.find_least_zero_pos( bitmask[j] ); } else { String[] bitmasks = cur_elem.split("~"); for(j = 1; j < bitmasks.length; j++) avg_bitpos += (double) FMBitmask.find_least_zero_pos( Long.parseLong( bitmasks[j], 16 ) ); } avg_bitpos = avg_bitpos / nreplication; sum_nh += Math.pow(2, avg_bitpos)/0.77351; } output_val = new Text( Double.toString(sum_nh ) + "\t" + converged_count + "\t" + changed_count); output.collect(zero_id, output_val); } } public static class RedStage3 extends MapReduceBase implements Reducer { private Text output_val; public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { double nh_sum = 0.0f; // N(h) int converged_sum = 0; // number of converged nodes at this iteration int changed_sum = 0; // number of changed nodes while (values.hasNext()) { final String[] line = values.next().toString().split("\t"); nh_sum += Double.parseDouble(line[0]); converged_sum += Integer.parseInt(line[1]); changed_sum += Integer.parseInt(line[2]); } output_val = new Text( Double.toString(nh_sum) + "\t" + Integer.toString(converged_sum) + "\t" + Integer.toString(changed_sum) ); output.collect(key, output_val); } } ////////////////////////////////////////////////////////////////////// // STAGE 4: Calculate the effective radii of nodes, after the bitstrings converged. // This is a map-only stage. 
// - Input: the converged bitstrings // - Output: (node_id, "bsf"max_radius:eff_radius) ////////////////////////////////////////////////////////////////////// public static class MapStage4 extends MapReduceBase implements Mapper { int block_width; public void configure(JobConf job) { block_width = Integer.parseInt(job.get("block_width")); System.out.println("MapStage4: block_width = " + block_width); } // input sample : // 3 s0 i1:1:2.0:2:5.8~e0000000~e0000000... 1 i1:1:1.6:2:2.4~e0000000~c0000000... public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { final String[] line = value.toString().split("\t"); final String[] tokens = line[1].substring(1).split(" "); int i, j; int block_id = Integer.parseInt(line[0] ); for(i = 0; i < tokens.length; i+=2) { int max_radius = 0; double eff_radius=0;//int eff_radius = 0; double eff_nh = 0; String bit_str = tokens[i+1].substring(1) ; if( bit_str.length() > 0 ) { String[] radius_str = bit_str.split("~"); String[] radius_info = radius_str[0].split(":"); if( radius_info.length > 1 ) { max_radius = Integer.parseInt( radius_info[ radius_info.length -2] ); eff_radius = max_radius; double max_nh = Double.parseDouble( radius_info[ radius_info.length -1] ); eff_nh = max_nh; double ninety_th = max_nh * 0.9; for(j = radius_info.length -4; j >=1; j -= 2) { int cur_hop = Integer.parseInt( radius_info[j] ); double cur_nh = Double.parseDouble( radius_info[j+1] ); if( cur_nh >= ninety_th ) { eff_radius = cur_hop; eff_nh = cur_nh; } else { eff_radius = cur_hop + (double)(ninety_th - cur_nh)/(eff_nh - cur_nh); break; } } } int elem_row = Integer.parseInt(tokens[i]); DecimalFormat df = new DecimalFormat("#.##"); output.collect( new IntWritable(block_width * block_id + elem_row), new Text("bsf" + max_radius + ":" + df.format(eff_radius)) ); } } } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path edge_path = null; protected Path curbm_path = null; protected Path tempbm_path = null; protected Path nextbm_path = null; protected Path output_path = null; protected Path radius_path = null; protected Path radius_summary_path = null; protected String local_output_path; protected int number_nodes = 0; protected int nreplication = 0; protected int nreducer = 1; protected int encode_bitmask = 0; protected int cur_radius = 1; protected int start_from_newbm = 0; protected int resume_from_radius = 0; protected int block_width = 16; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new HadiBlock(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("hadiblock <# of nodes> <# of replication> <# of reducers> "); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. 
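// (Editor's note) Judging from the argument parsing in run() below,
// the 12 expected arguments are, in order:
//     edge_path curbm_path tempbm_path nextbm_path output_path
//     number_nodes nreplication nreducer encode newbm_or_contN
//     block_width max_iteration
// where only the literal "enc" enables bitmask encoding, "newbm"
// starts from fresh bitstrings ("contN" resumes from hop N), and
// max_iteration is either a number or the literal "max".
// A hypothetical invocation (all paths are illustrative only):
//     hadoop jar pegasus.jar pegasus.HadiBlock hadi_edge hadi_curbm \
//         hadi_tempbm hadi_nextbm hadi_output 1000000 32 16 enc \
//         newbm 16 max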
public int run (final String[] args) throws Exception { int i; int max_iteration = MAX_ITERATIONS; if( args.length != 12 ) { return printUsage(); } edge_path = new Path(args[0]); curbm_path = new Path(args[1]); tempbm_path = new Path(args[2]); nextbm_path = new Path(args[3]); output_path = new Path(args[4]); number_nodes = Integer.parseInt(args[5]); radius_path = new Path("hadi_radius_block"); radius_summary_path = new Path("hadi_radius_block_summary"); nreplication = Integer.parseInt(args[6]); nreducer = Integer.parseInt(args[7]); if( args[8].compareTo("enc") == 0 ) encode_bitmask = 1; if( args[9].compareTo("newbm") == 0 ) start_from_newbm = 1; else { start_from_newbm = 0; cur_radius = Integer.parseInt(args[9].substring(4)); } block_width = Integer.parseInt(args[10]); if( args[11].compareTo("max") != 0 ) max_iteration = Integer.parseInt(args[11]); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing Radii/Diameter using block method. Current hop: " + cur_radius + ", edge_path: " + args[0] + ", encode: " + encode_bitmask + ", # reducers: " + nreducer + ", block width: " + block_width + ", max_iteration: " + max_iteration + "\n"); local_output_path = args[4] + number_nodes + "_tempblk"; N[0] = number_nodes; // Iteratively run Stage1 to Stage3. for (i = cur_radius; i <= max_iteration; i++) { JobClient.runJob(configStage1()); JobClient.runJob(configStage2()); JobClient.runJob(configStage3()); FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path)); final FileSystem fs = FileSystem.get(getConf()); // copy neighborhood information from HDFS to local disk, and read it! String new_path = local_output_path + "/" + i; fs.copyToLocalFile(output_path, new Path(new_path) ) ; HadiResultInfo ri = HadiUtils.readNhoodOutput(new_path); N[i] = ri.nh; iter_counter++; System.out.println("Nh(" + i + "):\t" + N[i] + "\tGuessed Radius(" + (i-1) + "):\t" + ri.converged_nodes ); // Stop when all radii converged. if( ri.changed_nodes == 0 ) {//if( i > 1 && N[i] == N[i-1] ) { System.out.println("All the bitstrings converged. 
Finishing..."); fs.delete(curbm_path); fs.delete(tempbm_path); fs.rename(nextbm_path, curbm_path); break; } // rotate directory fs.delete(curbm_path); fs.delete(tempbm_path); if(i < MAX_ITERATIONS - 1 ) fs.delete(output_path); fs.rename(nextbm_path, curbm_path); cur_radius++; } // Summarize Radius Information System.out.println("Calculating the effective diameter..."); JobClient.runJob(configStage4()); // Summarize Radius Information System.out.println("Summarizing radius information..."); JobClient.runJob(configStage5()); FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path)); // print summary information if( i > max_iteration ) System.out.println("Reached Max Iteartion " + max_iteration); System.out.println("Total Iteration = " + iter_counter + "."); System.out.println("Neighborhood Summary:"); for(int j = 0; j <= (i); j++) System.out.println("\tNh(" + (j) + "):\t" + N[j]); System.out.println("\n[PEGASUS] Radii and diameter computed."); System.out.println("[PEGASUS] Maximum diameter: " + (cur_radius - 1) ); System.out.println("[PEGASUS] Average diameter: " + HadiUtils.average_diameter(N, cur_radius - 1) ); System.out.println("[PEGASUS] 90% Effective diameter: " + HadiUtils.effective_diameter(N, cur_radius-1) ); System.out.println("[PEGASUS] Radii are saved in the HDFS " + radius_path.getName() ); System.out.println("[PEGASUS] Radii summary is saved in the HDFS " + radius_summary_path.getName() + "\n"); return 0; } // Configure pass1 protected JobConf configStage1() throws Exception { final JobConf conf = new JobConf(getConf(), HadiBlock.class); conf.set("nreplication", "" + nreplication); conf.set("encode_bitmask", "" + encode_bitmask); conf.set("block_width", "" + block_width); conf.setJobName("HADIBlk_Stage1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); FileInputFormat.setInputPaths(conf, edge_path, curbm_path); FileOutputFormat.setOutputPath(conf, tempbm_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure pass2 protected JobConf configStage2 () throws Exception { final JobConf conf = new JobConf(getConf(), HadiBlock.class); conf.set("nreplication", "" + nreplication); conf.set("encode_bitmask", "" + encode_bitmask); conf.set("cur_radius", "" + cur_radius); conf.set("block_width", "" + block_width); conf.setJobName("HADIBlk_Stage2"); conf.setMapperClass(MapStage2.class); conf.setReducerClass(RedStage2.class); conf.setCombinerClass(CombinerStage2.class); FileInputFormat.setInputPaths(conf, tempbm_path); FileOutputFormat.setOutputPath(conf, nextbm_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure Stage3 protected JobConf configStage3 () throws Exception { final JobConf conf = new JobConf(getConf(), HadiBlock.class); conf.set("nreplication", "" + nreplication); conf.set("encode_bitmask", "" + encode_bitmask); conf.setJobName("HADIBlk_Stage3"); conf.setMapperClass(MapStage3.class); conf.setReducerClass(RedStage3.class); conf.setCombinerClass(RedStage3.class); FileInputFormat.setInputPaths(conf, nextbm_path); FileOutputFormat.setOutputPath(conf, output_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure Stage4 protected JobConf configStage4 () throws Exception { final JobConf conf = new JobConf(getConf(), HadiBlock.class); 
conf.set("block_width", "" + block_width); conf.setJobName("HADIBlk_Stage4"); conf.setMapperClass(MapStage4.class); FileInputFormat.setInputPaths(conf, curbm_path); FileOutputFormat.setOutputPath(conf, radius_path); conf.setNumReduceTasks( 0 ); //This is essential for map-only tasks. conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); return conf; } // Configure Stage5 protected JobConf configStage5 () throws Exception { final JobConf conf = new JobConf(getConf(), HadiBlock.class); conf.setJobName("HADIBlk_Stage5"); // reuse maper and reducers from Hadi class. conf.setMapperClass(Hadi.MapStage5.class); conf.setReducerClass(Hadi.RedStage5.class); conf.setCombinerClass(Hadi.RedStage5.class); FileInputFormat.setInputPaths(conf, radius_path); FileOutputFormat.setOutputPath(conf, radius_summary_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(IntWritable.class); return conf; } } PEGASUS/src/pegasus/degdist/0000755000000000000000000000000011443145611014544 5ustar rootrootPEGASUS/src/pegasus/degdist/DegDist.java0000644000000000000000000002015711443145611016737 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: DegDist.java - Degree Distribution Version: 2.0 ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class DegDist extends Configured implements Tool { static int InDeg = 1, OutDeg = 2, InOutDeg = 3; ////////////////////////////////////////////////////////////////////// // PASS 1: group by node id. // Input : edge list // Output : key(node_id), value(degree) ////////////////////////////////////////////////////////////////////// public static class MapPass1 extends MapReduceBase implements Mapper { int deg_type = 0; public void configure(JobConf job) { deg_type = Integer.parseInt(job.get("deg_type")); System.out.println("MapPass1 : configure is called. 
degtype = " + deg_type ); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; String[] line = line_text.split("\t"); IntWritable one_int = new IntWritable(1); if( deg_type == OutDeg ) { IntWritable key_node_int = new IntWritable(); key_node_int.set(Integer.parseInt(line[0])); output.collect(key_node_int, one_int); } else if( deg_type == InDeg) { output.collect( new IntWritable(Integer.parseInt(line[1])), one_int ); } else if( deg_type == InOutDeg) { // emit both IntWritable from_node_int = new IntWritable(); IntWritable to_node_int = new IntWritable(); from_node_int.set(Integer.parseInt(line[0])); to_node_int.set(Integer.parseInt(line[1])); output.collect(from_node_int, to_node_int); output.collect(to_node_int, from_node_int); } } } public static class RedPass1 extends MapReduceBase implements Reducer { private final IntWritable one_int = new IntWritable(1); int deg_type = 0; public void configure(JobConf job) { deg_type = Integer.parseInt(job.get("deg_type")); System.out.println("RedPass1 : configure is called. degtype = " + deg_type ); } public void reduce (final IntWritable key, final Iterator values, OutputCollector output, final Reporter reporter) throws IOException { int degree = 0; if( deg_type != InOutDeg) { while (values.hasNext()) { int cur_degree = values.next().get(); degree += cur_degree; } output.collect(key, new IntWritable(degree) ); } else { // deg_type == InOutDeg Set outEdgeSet = new TreeSet(); while (values.hasNext()) { int cur_outedge = values.next().get(); outEdgeSet.add( cur_outedge ); } output.collect(key, new IntWritable(outEdgeSet.size()) ); } } } //////////////////////////////////////////////////////////////////////////////////////////////// // PASS 2: group by degree // Input : key(node id), value(degree) // Output : key(degree), value(count) //////////////////////////////////////////////////////////////////////////////////////////////// public static class MapPass2 extends MapReduceBase implements Mapper { public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String[] line = value.toString().split("\t"); output.collect(new IntWritable(Integer.parseInt(line[1])), new IntWritable(1) ); } } public static class RedPass2 extends MapReduceBase implements Reducer { public void reduce (final IntWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int count = 0; while (values.hasNext()) { int cur_count = values.next().get(); count += cur_count; } output.collect(key, new IntWritable(count) ); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path edge_path = null; protected Path node_deg_path = null; protected Path deg_count_path = null; protected int nreducer = 1; protected int deg_type; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new DegDist(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("DegDist <# of reducer> "); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. 
public int run (final String[] args) throws Exception { if( args.length != 5 ) { return printUsage(); } edge_path = new Path(args[0]); node_deg_path = new Path(args[1]); deg_count_path = new Path(args[2]); String deg_type_str = "In"; deg_type = InDeg; if( args[3].compareTo("out") == 0 ) { deg_type = OutDeg; deg_type_str = "Out"; } else if( args[3].compareTo("inout") == 0 ) { deg_type = InOutDeg; deg_type_str = "InOut"; } nreducer = Integer.parseInt(args[4]); System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Computing degree distribution. Degree type = " + deg_type_str + "\n"); // run job JobClient.runJob(configPass1()); JobClient.runJob(configPass2()); System.out.println("\n[PEGASUS] Degree distribution computed."); System.out.println("[PEGASUS] (NodeId, Degree) is saved in HDFS " + args[1] + ", (Degree, Count) is saved in HDFS " + args[2] + "\n" ); return 0; } // Configure pass1 protected JobConf configPass1() throws Exception { final JobConf conf = new JobConf(getConf(), DegDist.class); conf.set("deg_type", "" + deg_type); conf.setJobName("DegDist_pass1"); conf.setMapperClass(MapPass1.class); conf.setReducerClass(RedPass1.class); if( deg_type != InOutDeg) { conf.setCombinerClass(RedPass1.class); } FileInputFormat.setInputPaths(conf, edge_path); FileOutputFormat.setOutputPath(conf, node_deg_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(IntWritable.class); return conf; } // Configure pass2 protected JobConf configPass2 () throws Exception { final JobConf conf = new JobConf(getConf(), DegDist.class); conf.setJobName("DegDist_pass2"); conf.setMapperClass(MapPass2.class); conf.setReducerClass(RedPass2.class); conf.setCombinerClass(RedPass2.class); FileInputFormat.setInputPaths(conf, node_deg_path); FileOutputFormat.setOutputPath(conf, deg_count_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(IntWritable.class); return conf; } } PEGASUS/src/pegasus/matvec/0000755000000000000000000000000011443145611014400 5ustar rootrootPEGASUS/src/pegasus/matvec/MatvecNaive.java0000644000000000000000000002476511443145611017463 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: MatvecNaive.java - Plain matrix vector multiplication. 
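 Computes y = M*x in two passes: pass 1 joins each matrix element with the vector element having the matching column id and emits partial products; pass 2 sums the partial products per row id (the transpose flag swaps the roles of row and column).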
Version: 2.0 ***********************************************************************/ package pegasus.matvec; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.io.ArrayWritable; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; public class MatvecNaive extends Configured implements Tool { ////////////////////////////////////////////////////////////////////// // PASS 1: Hash join using Vector.rowid == Matrix.colid ////////////////////////////////////////////////////////////////////// public static class MapPass1 extends MapReduceBase implements Mapper { private final LongWritable from_node_int = new LongWritable(); int makesym = 0; int transpose = 0; int ignore_weights = 0; public void configure(JobConf job) { makesym = Integer.parseInt(job.get("makesym")); transpose = Integer.parseInt(job.get("transpose")); ignore_weights = Integer.parseInt(job.get("ignore_weights")); String input_file = job.get("map.input.file"); System.out.println("MatvecNaive.MapPass1: makesym = " + makesym); System.out.println("input_file = " + input_file); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); if( line.length == 2 || ignore_weights == 1) { // vector : ROWID VALUE('vNNNN') if( line[1].charAt(0) == 'v' ) { // vector : ROWID VALUE('vNNNN') from_node_int.set( Long.parseLong(line[0]) ); output.collect( from_node_int, new Text(line[1]) ); } else { // edge : ROWID COLID if(transpose == 0) { output.collect( new LongWritable(Long.parseLong(line[1])), new Text(line[0]) ); if(makesym == 1) output.collect( new LongWritable(Long.parseLong(line[0])), new Text(line[1]) ); } else { output.collect( new LongWritable(Long.parseLong(line[0])), new Text(line[1]) ); if(makesym == 1) output.collect( new LongWritable(Long.parseLong(line[1])), new Text(line[0]) ); } } } else if(line.length == 3) { // edge: ROWID COLID VALUE if(transpose == 0) { output.collect( new LongWritable(Long.parseLong(line[1])), new Text(line[0] + "\t" + line[2]) ); if(makesym == 1) output.collect( new LongWritable(Long.parseLong(line[0])), new Text(line[1] + "\t" + line[2]) ); } else { output.collect( new LongWritable(Long.parseLong(line[0])), new Text(line[1] + "\t" + line[2]) ); if(makesym == 1) output.collect( new LongWritable(Long.parseLong(line[1])), new Text(line[0] + "\t" + line[2]) ); } } } } public static class RedPass1 extends MapReduceBase implements Reducer { ArrayList to_nodes_list = new ArrayList(); ArrayList to_val_list = new ArrayList(); public void reduce (final LongWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int i; double vector_val = 0; boolean isValReceived = false; Map to_map = new HashMap(); while (values.hasNext()) { String line_text = 
values.next().toString(); final String[] line = line_text.split("\t"); if( line.length == 1 ) { if(line_text.charAt(0) == 'v') { // vector : VALUE vector_val = Double.parseDouble(line_text.substring(1)); if( isValReceived == false ) { isValReceived = true; // empty queue Iterator> iter = to_map.entrySet().iterator(); while(iter.hasNext()){ Map.Entry entry = iter.next(); output.collect( new LongWritable( entry.getKey() ), new DoubleWritable( vector_val * entry.getValue() ) ); } to_map.clear(); } } else { // edge : ROWID if( isValReceived == false ) to_map.put(Long.parseLong(line[0]), new Double(1.0) ); else { output.collect(new LongWritable(Long.parseLong(line[0])), new DoubleWritable(vector_val) ); } } } else { // edge : ROWID VALUE if( isValReceived == false ) to_map.put(Long.parseLong(line[0]), Double.parseDouble(line[1]) ); else { output.collect(new LongWritable(Long.parseLong(line[0])), new DoubleWritable(vector_val * Double.parseDouble(line[1])) ); } } } } } ////////////////////////////////////////////////////////////////////// // PASS 2: merge partial multiplication results ////////////////////////////////////////////////////////////////////// public static class MapPass2 extends MapReduceBase implements Mapper { private final LongWritable from_node_int = new LongWritable(); public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); from_node_int.set( Long.parseLong(line[0]) ); output.collect( from_node_int, new DoubleWritable( Double.parseDouble(line[1]) ) ); } } public static class RedPass2 extends MapReduceBase implements Reducer { public void reduce (final LongWritable key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { int i; double next_rank = 0; while (values.hasNext()) { String cur_value_str = values.next().toString(); next_rank += Double.parseDouble( cur_value_str ) ; } output.collect( key, new Text( "v" + next_rank ) ); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path edge_path = null; protected Path tempmv_path = null; protected Path output_path = null; protected Path vector_path = null; protected int number_nodes = 0; protected int nreducer = 1; int makesym = 0; int transpose = 0; int ignore_weights = 0; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new MatvecNaive(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("MatvecNaive <# of nodes> <# of reducers> "); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. 
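// Example dataflow: the matrix entry (row 3, col 7, value 0.5) is re-keyed on
// its column id 7; the reducer for key 7 also receives the vector element
// v0.2, so it emits the partial product (3, 0.1), which pass 2 adds into y[3].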
public int run (final String[] args) throws Exception { if( args.length < 5 ) { return printUsage(); } edge_path = new Path(args[0]); tempmv_path = new Path(args[1]); output_path = new Path(args[2]); nreducer = Integer.parseInt(args[3]); if(args[4].equals("makesym")) makesym = 1; if( args.length > 5 ) vector_path = new Path(args[5]); if( args.length > 6 ) transpose = Integer.parseInt(args[6]); if( args.length > 7 ) ignore_weights = Integer.parseInt(args[7]); final FileSystem fs = FileSystem.get(getConf()); fs.delete(tempmv_path); fs.delete(output_path); JobClient.runJob(configPass1()); JobClient.runJob(configPass2()); fs.delete(tempmv_path); return 0; } // Configure pass1 protected JobConf configPass1 () throws Exception { final JobConf conf = new JobConf(getConf(), MatvecNaive.class); conf.set("number_nodes", "" + number_nodes); conf.set("makesym", "" + makesym); conf.set("transpose", "" + transpose); conf.set("ignore_weights", "" + ignore_weights); conf.setJobName("MatvecNaive_pass1"); conf.setMapperClass(MapPass1.class); conf.setReducerClass(RedPass1.class); if( vector_path == null ) FileInputFormat.setInputPaths(conf, edge_path); else FileInputFormat.setInputPaths(conf, edge_path, vector_path); FileOutputFormat.setOutputPath(conf, tempmv_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(LongWritable.class); conf.setOutputValueClass(DoubleWritable.class); conf.setMapOutputValueClass(Text.class); return conf; } // Configure pass2 protected JobConf configPass2 () throws Exception { final JobConf conf = new JobConf(getConf(), MatvecNaive.class); conf.set("number_nodes", "" + number_nodes); conf.setJobName("MatvecNaive_pass2"); conf.setMapperClass(MapPass2.class); conf.setReducerClass(RedPass2.class); FileInputFormat.setInputPaths(conf, tempmv_path); FileOutputFormat.setOutputPath(conf, output_path); conf.setNumReduceTasks( nreducer ); conf.setOutputKeyClass(LongWritable.class); conf.setMapOutputValueClass(DoubleWritable.class); conf.setOutputValueClass(Text.class); return conf; } } PEGASUS/src/pegasus/matvec/MatvecPrep.java0000644000000000000000000002225411443145611017316 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Authors: U Kang, Duen Horng Chau, and Christos Faloutsos This software is licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------- File: MatvecPrep.java - convert matrix(edges) or vectors into block form. This program is used for converting data to be used in the block version of HADI, HCC, and PageRank. 
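 With block width b, matrix element (i, j) is stored in block (i/b, j/b) at in-block position (j%b, i%b); the in-block row and column are intentionally swapped (the mapper's "transpose trick"), so each stored block holds the transpose of the corresponding block of the input matrix.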
Version: 2.0 ***********************************************************************/ package pegasus.matvec; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; public class MatvecPrep extends Configured implements Tool { ////////////////////////////////////////////////////////////////////// // STAGE 1: convert vectors and edges to block format // (a) (vector) ROWID vVALUE => BLOCKID IN-BLOCK-INDEX VALUE // (b) (real matrix) ROWID COLID VALUE // => BLOCK-ROW BLOCK-COL IN-BLOCK-ROW IN-BLOCK-COL VALUE // (c) (0-1 matrix) ROWID COLID // => BLOCK-ROW BLOCK-COL IN-BLOCK-ROW IN-BLOCK-COL VALUE ////////////////////////////////////////////////////////////////////// public static class MapStage1 extends MapReduceBase implements Mapper { int block_size; int matrix_row; int makesym; public void configure(JobConf job) { block_size = Integer.parseInt(job.get("block_size")); matrix_row = Integer.parseInt(job.get("matrix_row")); makesym = Integer.parseInt(job.get("makesym")); System.out.println("MapStage1: block_size = " + block_size + ", matrix_row=" + matrix_row + ", makesym = " + makesym); } public void map (final LongWritable key, final Text value, final OutputCollector output, final Reporter reporter) throws IOException { String line_text = value.toString(); if (line_text.startsWith("#")) // ignore comments in edge file return; final String[] line = line_text.split("\t"); if(line.length < 2 ) return; if( line[1].charAt(0) == 'v') { // (vector) ROWID vVALUE => BLOCKID IN-BLOCK-INDEX VALUE int row_id = Integer.parseInt(line[0]); int block_id = row_id / block_size; int in_block_index = row_id % block_size; output.collect( new Text("" + block_id), new Text("" + in_block_index + " " + line[1].substring(1)) ); } else { int row_id = Integer.parseInt(line[0]); int col_id = Integer.parseInt(line[1]); int block_rowid = row_id / block_size; int block_colid = col_id / block_size; int in_block_row = col_id % block_size; // trick : transpose int in_block_col = row_id % block_size; // trick : transpose if( line.length == 3 ) { // (real matrix) ROWID COLID VALUE // => BLOCK-ROW BLOCK-COL IN-BLOCK-ROW IN-BLOCK-COL VALUE String elem_val; if(line[2].charAt(0) == 'v') elem_val = line[2].substring(1); else elem_val = line[2]; output.collect( new Text("" + block_rowid + "\t" + block_colid), new Text("" + in_block_row + " " + in_block_col + " " + line[2]) ); } else { // (0-1 matrix) ROWID COLID // => BLOCK-ROW BLOCK-COL IN-BLOCK-ROW IN-BLOCK-COL output.collect( new Text("" + block_rowid + "\t" + block_colid), new Text("" + in_block_row + " " + in_block_col) ); if( makesym == 1 ) // output transposed entry output.collect( new Text("" + block_colid + "\t" + block_rowid), new Text("" + in_block_col + " " + in_block_row) ); } } } } static class MvPrepComparator implements Comparator { public int compare(Object o1, Object o2) { String s1 = o1.toString(); String s2 = o2.toString(); int pos1 = s1.indexOf(' '); int pos2 = s2.indexOf(' '); int val1 = Integer.parseInt(s1.substring(0,pos1)); int val2 = Integer.parseInt(s2.substring(0,pos2)); return (val1-val2); } public boolean equals(Object o1, Object o2) { String s1 = o1.toString(); String s2 = o2.toString(); int pos1 = s1.indexOf(' '); int pos2 = s2.indexOf(' '); int val1 = Integer.parseInt(s1.substring(0,pos1)); int val2 = Integer.parseInt(s2.substring(0,pos2)); if( val1 == val2 ) return true; else return false; 
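// MvPrepComparator orders the value strings by their leading integer (the
// in-block index / in-block row), so that RedStage1 below can emit each
// block's entries in sorted order.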
} } public static class RedStage1 extends MapReduceBase implements Reducer { String out_prefix = ""; MvPrepComparator mpc = new MvPrepComparator(); public void configure(JobConf job) { out_prefix = job.get("out_prefix"); System.out.println("RedStage1: out_prefix = " + out_prefix); } public void reduce (final Text key, final Iterator values, final OutputCollector output, final Reporter reporter) throws IOException { String out_value = ""; ArrayList value_al = new ArrayList(); while (values.hasNext()) { // vector: key=BLOCKID, value= IN-BLOCK-INDEX VALUE // matrix: key=BLOCK-ROW BLOCK-COL, value=IN-BLOCK-ROW IN-BLOCK-COL VALUE String value_text = values.next().toString(); value_al.add( value_text ); } Collections.sort(value_al, mpc ); Iterator iter = value_al.iterator(); while( iter.hasNext() ){ String cur_val = iter.next(); if( out_value.length() != 0 ) out_value += " "; out_value += cur_val; } value_al.clear(); if( out_prefix != null ) output.collect(key, new Text(out_prefix + out_value)); else output.collect(key, new Text(out_value)); } } ////////////////////////////////////////////////////////////////////// // command line interface ////////////////////////////////////////////////////////////////////// protected Path edge_path = null; protected Path output_path = null; protected int number_nodes = 0; protected int block_size = 1; protected int nreducer = 1; protected String output_prefix; protected int makesym = 0; // Main entry point. public static void main (final String[] args) throws Exception { final int result = ToolRunner.run(new Configuration(), new MatvecPrep(), args); System.exit(result); } // Print the command-line usage text. protected static int printUsage () { System.out.println("MatvecPrep <# of row> <# of reducer> "); ToolRunner.printGenericCommandUsage(System.out); return -1; } // submit the map/reduce job. public int run (final String[] args) throws Exception { if( args.length != 7 ) { return printUsage(); } edge_path = new Path(args[0]); output_path = new Path(args[1]); number_nodes = Integer.parseInt(args[2]); // number of row of matrix block_size = Integer.parseInt(args[3]); nreducer = Integer.parseInt(args[4]); if( args[5].compareTo("null") == 0 ) output_prefix = ""; else output_prefix = args[5]; if( args[6].compareTo("makesym") == 0 ) makesym = 1; else makesym = 0; System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n"); System.out.println("[PEGASUS] Converting the adjacency matrix to block format. 
Output_prefix = " + output_prefix + ", makesym = " + makesym + ", block width=" + block_size + "\n"); // run job JobClient.runJob(configStage1(output_prefix)); System.out.println("\n[PEGASUS] Conversion finished."); System.out.println("[PEGASUS] Block adjacency matrix is saved in the HDFS " + args[1] + "\n"); return 0; } // Configure pass1 protected JobConf configStage1 (String out_prefix) throws Exception { final JobConf conf = new JobConf(getConf(), MatvecPrep.class); conf.set("block_size", "" + block_size); conf.set("matrix_row", "" + number_nodes); conf.set("out_prefix", "" + out_prefix); conf.set("makesym", "" + makesym); conf.setJobName("MatvecPrep_Stage1"); conf.setMapperClass(MapStage1.class); conf.setReducerClass(RedStage1.class); FileSystem fs = FileSystem.get(getConf()); fs.delete(output_path, true); FileInputFormat.setInputPaths(conf, edge_path); FileOutputFormat.setOutputPath(conf, output_path); int num_reduce_tasks = nreducer; conf.setNumReduceTasks( num_reduce_tasks ); conf.setOutputKeyClass(Text.class); conf.setOutputValueClass(Text.class); conf.setMapOutputValueClass(Text.class); return conf; } } PEGASUS/src/pegasus/matvec/MatvecUtils.java0000644000000000000000000000352711443145611017512 0ustar rootroot/*********************************************************************** PEGASUS: Peta-Scale Graph Mining System Copyright (c) 2009 U Kang and Christos Faloutsos All Rights Reserved You may use this code without fee, for educational and research purposes. Any for-profit use requires written consent of the copyright holders. ------------------------------------------------------------------------- File: PegasusUtils.java - Common utility classes and functions Version: 0.9 Author Email: U Kang(ukang@cs.cmu.edu), Christos Faloutsos(christos@cs.cmu.edu) ***********************************************************************/ package pegasus; import java.io.*; import java.util.*; import java.text.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.util.*; // common utility functions public class MatvecUtils { // convert Vector string to array of VectorElem. // strVal is (ROW-ID VALUE)s. ex) 0 0.5 1 0.3 public static double[] decodeBlockVector(String strVal, int block_width) { int i; double [] vector = new double[block_width]; for(i=0; i< block_width; i++) vector[i] = 0; //ArrayList arr = new ArrayList(); final String[] tokens = strVal.split(" "); for(i = 0; i < tokens.length; i += 2) { short row = Short.parseShort(tokens[i]); double val = Double.parseDouble(tokens[i+1]); vector[row] = val; } return vector; } // convert double[] to String // strVal is (ROW-ID VALUE)s. ex) 0 0.5 1 0.3 public static String encodeBlockVector(double[] vec, int block_width) { int i; String result = ""; for(i=0; i< block_width; i++) { if( vec[i] != 0 ) { if( result.length() > 0 ) result += " "; result += ("" + i + " " + vec[i]); } } return result; } } PEGASUS/uniq.py0000755000000000000000000000070511443145611012216 0ustar rootroot#!/usr/bin/python # uniq.py import sys import os cmptid=-1 count=0 key=-1 line="" sys.stdout.softspace=False; for cur_line in sys.stdin: cur_line = cur_line.strip() if(cur_line==""): if(count>0): print line,"\t",count count=0 line="" if(cur_line != line): if(count>0): print line, "\t", count sys.stdout.flush() line=cur_line count = 1 else: count += 1 if(count>0): print line,"\t",count