@@ -11,10 +11,18 @@ namespace ServiceLayer.Mesh.Tests.FileTypes.NbssAppointmentEvents;
1111public class FileParserTests
1212{
1313 private readonly FileParser _fileParser ;
14+ private readonly string _testDataPath ;
1415
1516 public FileParserTests ( )
1617 {
1718 _fileParser = new FileParser ( ) ;
19+ _testDataPath = Path . Combine ( AppContext . BaseDirectory , "TestData" ) ;
20+ }
21+
22+ private FileStream GetTestFileStream ( string fileName )
23+ {
24+ string filePath = Path . Combine ( _testDataPath , fileName ) ;
25+ return File . OpenRead ( filePath ) ;
1826 }
1927
2028 [ Fact ]
@@ -49,13 +57,7 @@ public void Parse_EmptyStream_ReturnsEmptyParsedFile()
4957 public void Parse_ValidFile_ReturnsParsedFileWithCorrectStructure ( )
5058 {
5159 // Arrange
52-
53- var testFile = Path . Combine (
54- AppContext . BaseDirectory ,
55- "TestData" ,
56- "TestFile1.csv" ) ;
57-
58- using var fileStream = File . OpenRead ( testFile ) ;
60+ using var fileStream = GetTestFileStream ( "TestFile1.csv" ) ;
5961
6062 // Act
6163 var result = _fileParser . Parse ( fileStream ) ;
@@ -96,12 +98,7 @@ public void Parse_ValidFile_ReturnsParsedFileWithCorrectStructure()
9698 public void Parse_CompleteDataset_ParsesAllFieldsCorrectly ( )
9799 {
98100 // Arrange
99- var completeDatasetPath = Path . Combine (
100- AppContext . BaseDirectory ,
101- "TestData" ,
102- "CompleteDataset.csv" ) ;
103-
104- using var fileStream = File . OpenRead ( completeDatasetPath ) ;
101+ using var fileStream = GetTestFileStream ( "CompleteDataset.csv" ) ;
105102
106103 // Act
107104 var result = _fileParser . Parse ( fileStream ) ;
@@ -150,12 +147,7 @@ public void Parse_CompleteDataset_ParsesAllFieldsCorrectly()
150147 public void Parse_MissingFieldsRecord_ThrowsInvalidOperationException ( )
151148 {
152149 // Arrange
153- var missingFieldsPath = Path . Combine (
154- AppContext . BaseDirectory ,
155- "TestData" ,
156- "MissingFields.csv" ) ;
157-
158- using var fileStream = File . OpenRead ( missingFieldsPath ) ;
150+ using var fileStream = GetTestFileStream ( "MissingFields.csv" ) ;
159151
160152 // Act & Assert
161153 var exception = Assert . Throws < InvalidOperationException > ( ( ) => _fileParser . Parse ( fileStream ) ) ;
@@ -167,16 +159,10 @@ public void Parse_MissingFieldsRecord_ThrowsInvalidOperationException()
167159 public void Parse_UnknownRecordType_ThrowsInvalidOperationException ( )
168160 {
169161 // Arrange
170- var unknownRecordPath = Path . Combine (
171- AppContext . BaseDirectory ,
172- "TestData" ,
173- "UnknownRecord.csv" ) ;
174-
175- using var fileStream = File . OpenRead ( unknownRecordPath ) ;
162+ using var fileStream = GetTestFileStream ( "UnknownRecord.csv" ) ;
176163
177164 // Act & Assert
178- var exception = Assert . Throws < InvalidOperationException > (
179- ( ) => _fileParser . Parse ( fileStream ) ) ;
165+ var exception = Assert . Throws < InvalidOperationException > ( ( ) => _fileParser . Parse ( fileStream ) ) ;
180166
181167 Assert . Equal ( "Unknown record identifier: UNKNOWN_TYPE" , exception . Message ) ;
182168 }
@@ -185,12 +171,7 @@ public void Parse_UnknownRecordType_ThrowsInvalidOperationException()
185171 public void Parse_EmptyLine_SkipsEmptyLines ( )
186172 {
187173 // Arrange
188- var emptyLinesPath = Path . Combine (
189- AppContext . BaseDirectory ,
190- "TestData" ,
191- "EmptyLines.csv" ) ;
192-
193- using var fileStream = File . OpenRead ( emptyLinesPath ) ;
174+ using var fileStream = GetTestFileStream ( "EmptyLines.csv" ) ;
194175
195176 // Act
196177 var result = _fileParser . Parse ( fileStream ) ;
@@ -205,12 +186,7 @@ public void Parse_EmptyLine_SkipsEmptyLines()
205186 public void Parse_FewerColumnsInDataRecord_OnlyProcessesAvailableColumns ( )
206187 {
207188 // Arrange
208- var fewerColumnsPath = Path . Combine (
209- AppContext . BaseDirectory ,
210- "TestData" ,
211- "FewerColumns.csv" ) ;
212-
213- using var fileStream = File . OpenRead ( fewerColumnsPath ) ;
189+ using var fileStream = GetTestFileStream ( "FewerColumns.csv" ) ;
214190
215191 // Act
216192 var result = _fileParser . Parse ( fileStream ) ;
@@ -236,12 +212,7 @@ public void Parse_FewerColumnsInDataRecord_OnlyProcessesAvailableColumns()
236212 public void Parse_ExtraColumnsInDataRecord_IgnoresExtraColumns ( )
237213 {
238214 // Arrange
239- var extraColumnsPath = Path . Combine (
240- AppContext . BaseDirectory ,
241- "TestData" ,
242- "ExtraColumns.csv" ) ;
243-
244- using var fileStream = File . OpenRead ( extraColumnsPath ) ;
215+ using var fileStream = GetTestFileStream ( "ExtraColumns.csv" ) ;
245216
246217 // Act
247218 var result = _fileParser . Parse ( fileStream ) ;
@@ -266,12 +237,7 @@ public void Parse_ExtraColumnsInDataRecord_IgnoresExtraColumns()
266237 public void Parse_QuotedValues_TrimsQuotes ( )
267238 {
268239 // Arrange
269- var quotedValuesPath = Path . Combine (
270- AppContext . BaseDirectory ,
271- "TestData" ,
272- "QuotedValues.csv" ) ;
273-
274- using var fileStream = File . OpenRead ( quotedValuesPath ) ;
240+ using var fileStream = GetTestFileStream ( "QuotedValues.csv" ) ;
275241
276242 // Act
277243 var result = _fileParser . Parse ( fileStream ) ;
@@ -295,12 +261,7 @@ public void Parse_QuotedValues_TrimsQuotes()
295261 public void Parse_WithEscapedCharacters_HandlesCorrectly ( )
296262 {
297263 // Arrange
298- var escapedCharsPath = Path . Combine (
299- AppContext . BaseDirectory ,
300- "TestData" ,
301- "EscapedChars.csv" ) ;
302-
303- using var fileStream = File . OpenRead ( escapedCharsPath ) ;
264+ using var fileStream = GetTestFileStream ( "EscapedChars.csv" ) ;
304265
305266 // Act
306267 var result = _fileParser . Parse ( fileStream ) ;
@@ -324,30 +285,14 @@ public void Parse_WithEscapedCharacters_HandlesCorrectly()
324285 public void VerifyFileHeaderRecordMap_MapsCorrectly ( )
325286 {
326287 // Arrange
327- var headerMappingPath = Path . Combine (
328- AppContext . BaseDirectory ,
329- "TestData" ,
330- "HeaderMapping.csv" ) ;
331-
332- using var fileStream = File . OpenRead ( headerMappingPath ) ;
333-
334- // Act & Assert - setup a CSV reader with proper configuration to verify the class map works
335- using var reader = new StreamReader ( fileStream ) ;
336- var config = new CsvConfiguration ( CultureInfo . InvariantCulture )
337- {
338- Delimiter = "|" ,
339- Quote = '"' ,
340- Escape = '\\' ,
341- HasHeaderRecord = false ,
342- Mode = CsvMode . RFC4180
343- } ;
288+ using var reader = CreateConfiguredCsvReader ( "HeaderMapping.csv" ) ;
289+ reader . Context . RegisterClassMap < FileHeaderRecordMap > ( ) ;
344290
345- using var csv = new CsvHelper . CsvReader ( reader , config ) ;
346- csv . Context . RegisterClassMap < FileHeaderRecordMap > ( ) ;
347- csv . Read ( ) ;
348- var result = csv . GetRecord < FileHeaderRecord > ( ) ;
291+ // Act
292+ reader . Read ( ) ;
293+ var result = reader . GetRecord < FileHeaderRecord > ( ) ;
349294
350- // Verify mapping worked correctly
295+ // Assert
351296 Assert . Equal ( "NBSSAPPT_HDR" , result . RecordTypeIdentifier ) ;
352297 Assert . Equal ( "00000054" , result . ExtractId ) ;
353298 Assert . Equal ( "20250204" , result . TransferStartDate ) ;
@@ -359,15 +304,25 @@ public void VerifyFileHeaderRecordMap_MapsCorrectly()
359304 public void VerifyFileTrailerRecordMap_MapsCorrectly ( )
360305 {
361306 // Arrange
362- var trailerMappingPath = Path . Combine (
363- AppContext . BaseDirectory ,
364- "TestData" ,
365- "TrailerMapping.csv" ) ;
307+ using var reader = CreateConfiguredCsvReader ( "TrailerMapping.csv" ) ;
308+ reader . Context . RegisterClassMap < FileTrailerRecordMap > ( ) ;
366309
367- using var fileStream = File . OpenRead ( trailerMappingPath ) ;
310+ // Act
311+ reader . Read ( ) ;
312+ var result = reader . GetRecord < FileTrailerRecord > ( ) ;
368313
369- // Act & Assert - setup a CSV reader with proper configuration to verify the class map works
370- using var reader = new StreamReader ( fileStream ) ;
314+ // Assert
315+ Assert . Equal ( "NBSSAPPT_END" , result . RecordTypeIdentifier ) ;
316+ Assert . Equal ( "00000054" , result . ExtractId ) ;
317+ Assert . Equal ( "20250204" , result . TransferEndDate ) ;
318+ Assert . Equal ( "161846" , result . TransferEndTime ) ;
319+ Assert . Equal ( "000002" , result . RecordCount ) ;
320+ }
321+
322+ // Helper methods
323+ private CsvReader CreateConfiguredCsvReader ( string fileName )
324+ {
325+ var streamReader = new StreamReader ( GetTestFileStream ( fileName ) ) ;
371326 var config = new CsvConfiguration ( CultureInfo . InvariantCulture )
372327 {
373328 Delimiter = "|" ,
@@ -377,17 +332,7 @@ public void VerifyFileTrailerRecordMap_MapsCorrectly()
377332 Mode = CsvMode . RFC4180
378333 } ;
379334
380- using var csv = new CsvHelper . CsvReader ( reader , config ) ;
381- csv . Context . RegisterClassMap < FileTrailerRecordMap > ( ) ;
382- csv . Read ( ) ;
383- var result = csv . GetRecord < FileTrailerRecord > ( ) ;
384-
385- // Verify mapping worked correctly
386- Assert . Equal ( "NBSSAPPT_END" , result . RecordTypeIdentifier ) ;
387- Assert . Equal ( "00000054" , result . ExtractId ) ;
388- Assert . Equal ( "20250204" , result . TransferEndDate ) ;
389- Assert . Equal ( "161846" , result . TransferEndTime ) ;
390- Assert . Equal ( "000002" , result . RecordCount ) ;
335+ return new CsvReader ( streamReader , config ) ;
391336 }
392337
393338 private static MemoryStream CreateStreamFromString ( string content )
@@ -423,8 +368,8 @@ private static void VerifyFileTrailerRecord(
423368 Assert . NotNull ( record ) ;
424369 Assert . Equal ( recordType , record . RecordTypeIdentifier ) ;
425370 Assert . Equal ( extractId , record . ExtractId ) ;
426- Assert . Equal ( date , record . TransferEndDate ?? date ) ;
427- Assert . Equal ( time , record . TransferEndTime ?? time ) ;
371+ Assert . Equal ( date , record . TransferEndDate ) ;
372+ Assert . Equal ( time , record . TransferEndTime ) ;
428373 Assert . Equal ( count , record . RecordCount ) ;
429374 }
430375