@@ -637,3 +637,184 @@ describe('HTTPClient', function() {
637637 } ) ;
638638 } ) ;
639639} ) ;
640+
// Tests for the log-splitting feature of EventhubLogHandler: records from
// configured sources (e.g. azure.datafactory) that contain an array at a
// configured path are split into one log per array element, optionally
// keeping the original record as well.
describe('Log Splitting', function() {
    // Builds a handler with the given log-splitting config and stubbed
    // tagging methods so ddsource is deterministic in the assertions below.
    function setUpWithLogSplittingConfig(config) {
        const forwarder = new client.EventhubLogHandler(fakeContext());

        // Mock the log splitting configuration
        forwarder.logSplittingConfig = config;

        // Mock addTagsToJsonLog to set ddsource for testing
        forwarder.addTagsToJsonLog = x => {
            return Object.assign(
                {
                    ddsource: 'azure.datafactory',
                    ddsourcecategory: 'azure',
                    service: 'azure',
                    ddtags: 'forwardername:testFunctionName'
                },
                x
            );
        };

        forwarder.addTagsToStringLog = x => {
            return {
                ddsource: 'azure.datafactory',
                ddsourcecategory: 'azure',
                service: 'azure',
                ddtags: 'forwardername:testFunctionName',
                message: x
            };
        };

        return forwarder;
    }

    describe('#logSplitting with azure.datafactory configuration', function() {
        beforeEach(function() {
            this.testConfig = {
                'azure.datafactory': {
                    paths: [['properties', 'Output', 'value']],
                    keep_original_log: true,
                    preserve_fields: false
                }
            };
            this.forwarder = setUpWithLogSplittingConfig(this.testConfig);
        });

        it('should split logs with correct field structure', function() {
            const inputLog = {
                resourceId:
                    '/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.DataFactory/factories/test-factory',
                properties: {
                    Output: {
                        value: [
                            { id: 1, name: 'item1', status: 'success' },
                            { id: 2, name: 'item2', status: 'failed' },
                            { id: 3, name: 'item3', status: 'success' }
                        ]
                    }
                },
                timestamp: '2023-01-01T00:00:00Z',
                category: 'PipelineRuns'
            };

            const result = this.forwarder.handleLogs(inputLog);

            assert.equal(result.length, 4); // 3 split logs + 1 original
            assert.equal(result[0].ddsource, 'azure.datafactory');
            assert.ok(result[0].parsed_arrays);

            // the first 3 logs should be split from the array: each one
            // carries a single (non-array) element at the configured path
            for (let i = 0; i < result.length - 1; i++) {
                assert.equal(result[i].ddsource, 'azure.datafactory');
                assert.ok(
                    !Array.isArray(
                        result[i].parsed_arrays.properties['Output']['value']
                    )
                );
            }

            // The last log should be the original log, with the array intact
            assert.equal(result[3].ddsource, 'azure.datafactory');
            assert.ok(Array.isArray(result[3].properties['Output']['value']));
        });

        it('should preserve original log when path does not exist', function() {
            const inputLog = {
                properties: {
                    SomeOtherField: 'value'
                }
            };

            const result = this.forwarder.handleLogs(inputLog);

            // Should only have 1 log (original) since path doesn't exist
            assert.equal(result.length, 1);
            assert.equal(result[0].ddsource, 'azure.datafactory');
            assert.equal(result[0].properties.SomeOtherField, 'value');
        });

        it('should handle null/undefined values in path gracefully', function() {
            const inputLog = {
                properties: {
                    Output: null
                }
            };

            const result = this.forwarder.handleLogs(inputLog);

            // Should only have 1 log (original) since path leads to null
            assert.equal(result.length, 1);
            assert.equal(result[0].ddsource, 'azure.datafactory');
            assert.equal(result[0].properties.Output, null);
        });
    });

    describe('#logSplitting with non-datafactory source', function() {
        beforeEach(function() {
            this.testConfig = {
                'azure.datafactory': {
                    paths: [['properties', 'Output', 'value']],
                    keep_original_log: true,
                    preserve_fields: true
                }
            };
            this.forwarder = setUpWithLogSplittingConfig(this.testConfig);

            // Override to return different source so the datafactory-only
            // splitting config should not apply
            this.forwarder.addTagsToJsonLog = x => {
                return Object.assign(
                    {
                        ddsource: 'azure.storage',
                        ddsourcecategory: 'azure',
                        service: 'azure',
                        ddtags: 'forwardername:testFunctionName'
                    },
                    x
                );
            };
        });

        it('should not split logs from other sources', function() {
            const inputLog = {
                properties: {
                    Output: {
                        value: [
                            { id: 1, name: 'item1' },
                            { id: 2, name: 'item2' }
                        ]
                    }
                }
            };

            const result = this.forwarder.handleLogs(inputLog);

            // Should only have 1 log (original) since source doesn't match config
            assert.equal(result.length, 1);
            assert.equal(result[0].ddsource, 'azure.storage');
            assert.ok(!result[0].parsed_arrays);
        });
    });

    describe('#findSplitRecords method behavior', function() {
        beforeEach(function() {
            this.forwarder = new client.EventhubLogHandler(fakeContext());
        });

        it('should return an object with the field at the end of the chain in fields', function() {
            const value = [1, 2, 3];
            const fields = ['properties', 'Output', 'value'];
            const record = {
                properties: {
                    Output: {
                        value: value
                    }
                }
            };

            const result = this.forwarder.findSplitRecords(record, fields);
            assert.equal(result, value);
        });

        it('should return null when path leads to null/undefined', function() {
            const fields = ['properties', 'Output', 'value'];
            const record = {
                0: null
            };

            const result = this.forwarder.findSplitRecords(record, fields);
            assert.equal(result, null);
        });
    });
});
0 commit comments