Obviously, this question has been asked many times. The normal procedure:
Workbooks.Open (ActiveWorkbook.Path & "\Test.csv")
won't parse the CSV correctly (the whole row ends up in a single cell).
Thanks to Lernkurve, I can use the function from his answer Opening semicolon delimited CSV file to get it right:
Sub ImportCSVFile(filepath As String)
    Dim line As String
    Dim arrayOfElements
    Dim linenumber As Integer
    Dim elementnumber As Integer
    Dim element As Variant
    linenumber = 0
    elementnumber = 0
    Open filepath For Input As #1 ' Open file for input
    Do While Not EOF(1) ' Loop until end of file
        linenumber = linenumber + 1
        Line Input #1, line
        arrayOfElements = Split(line, ";")
        elementnumber = 0
        For Each element In arrayOfElements
            elementnumber = elementnumber + 1
            Cells(linenumber, elementnumber).Value = element
        Next
    Loop
    Close #1 ' Close file.
End Sub
This, however, is not fast (I have files with thousands of columns), and my question is:
Is there a native way to open CSV files in Excel with correct parsing?
Workbooks.Open does work too.
Workbooks.Open ActiveWorkbook.Path & "\Temp.csv", Local:=True
This works (and is needed) because I use Excel in Germany on an English installation of Windows, so Excel uses "," to separate the .csv by default. Even if you use the code below, Excel forces the "," separator:
Workbooks.Open ActiveWorkbook.Path & "\Test.csv", , , 6, , , , , ";"
and Workbooks.Open ActiveWorkbook.Path & "\Temp.csv", , , 4 and variants of these do not work(!)
Why do they even have the Delimiter parameter if it gets ignored while the Local parameter is what actually matters?! This makes no sense at all, but now it works.
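If you are not sure which separators your copy of Excel actually expects (which is what Local:=True makes Workbooks.Open respect), a quick diagnostic sketch like this can save some guessing:
Sub ShowCurrentSeparators()
    ' Application.International reports the separators of the current
    ' Excel/Windows locale; this is only a diagnostic aid, not part of the import.
    MsgBox "List separator: " & Application.International(xlListSeparator) & vbCrLf & _
           "Decimal separator: " & Application.International(xlDecimalSeparator)
End Sub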
This function reads a 15 MB CSV file and copies its content into a sheet in about 3 seconds.
What is probably taking a lot of time in your code is that you copy the data cell by cell instead of writing the whole content at once.
Option Explicit
Public Sub test()
copyDataFromCsvFileToSheet "C:\temp\test.csv", ",", "Sheet1"
End Sub
Private Sub copyDataFromCsvFileToSheet(parFileName As String, parDelimiter As String, parSheetName As String)
Dim data As Variant
data = getDataFromFile(parFileName, parDelimiter)
If Not isArrayEmpty(data) Then
With Sheets(parSheetName)
.Cells.ClearContents
.Cells(1, 1).Resize(UBound(data, 1), UBound(data, 2)) = data
End With
End If
End Sub
Public Function isArrayEmpty(parArray As Variant) As Boolean
'Returns True if parArray is not an array, or is a dynamic array that has not been initialised (ReDim) or has been erased (Erase)
If IsArray(parArray) = False Then isArrayEmpty = True
On Error Resume Next
If UBound(parArray) < LBound(parArray) Then isArrayEmpty = True: Exit Function Else: isArrayEmpty = False
End Function
Private Function getDataFromFile(parFileName As String, parDelimiter As String, Optional parExcludeCharacter As String = "") As Variant
'parFileName is supposed to be a delimited file (csv...)
'parDelimiter is the delimiter, "," for example in a comma delimited file
'Returns an empty array if file is empty or can't be opened
'number of columns based on the line with the largest number of columns, not on the first line
'parExcludeCharacter: sometimes csv files have quotes around strings: "XXX" - if parExcludeCharacter = """" then removes the quotes
Dim locLinesList() As Variant
Dim locData As Variant
Dim i As Long
Dim j As Long
Dim locNumRows As Long
Dim locNumCols As Long
Dim fso As Variant
Dim ts As Variant
Const REDIM_STEP = 10000
Set fso = CreateObject("Scripting.FileSystemObject")
On Error GoTo error_open_file
Set ts = fso.OpenTextFile(parFileName)
On Error GoTo unhandled_error
'Counts the number of lines and the largest number of columns
ReDim locLinesList(1 To 1) As Variant
i = 0
Do While Not ts.AtEndOfStream
If i Mod REDIM_STEP = 0 Then
ReDim Preserve locLinesList(1 To UBound(locLinesList, 1) + REDIM_STEP) As Variant
End If
locLinesList(i + 1) = Split(ts.ReadLine, parDelimiter)
j = UBound(locLinesList(i + 1), 1) 'number of columns
If locNumCols < j Then locNumCols = j
i = i + 1
Loop
ts.Close
locNumRows = i
If locNumRows = 0 Then Exit Function 'Empty file
ReDim locData(1 To locNumRows, 1 To locNumCols + 1) As Variant
'Copies the file into an array
If parExcludeCharacter <> "" Then
For i = 1 To locNumRows
For j = 0 To UBound(locLinesList(i), 1)
If Left(locLinesList(i)(j), 1) = parExcludeCharacter Then
If Right(locLinesList(i)(j), 1) = parExcludeCharacter Then
locLinesList(i)(j) = Mid(locLinesList(i)(j), 2, Len(locLinesList(i)(j)) - 2) 'If locTempArray = "", Mid returns ""
Else
locLinesList(i)(j) = Right(locLinesList(i)(j), Len(locLinesList(i)(j)) - 1)
End If
ElseIf Right(locLinesList(i)(j), 1) = parExcludeCharacter Then
locLinesList(i)(j) = Left(locLinesList(i)(j), Len(locLinesList(i)(j)) - 1)
End If
locData(i, j + 1) = locLinesList(i)(j)
Next j
Next i
Else
For i = 1 To locNumRows
For j = 0 To UBound(locLinesList(i), 1)
locData(i, j + 1) = locLinesList(i)(j)
Next j
Next i
End If
getDataFromFile = locData
Exit Function
error_open_file: 'returns empty variant
unhandled_error: 'returns empty variant
End Function
This may help you; it also depends on how your CSV file is formatted.
Open your Excel sheet & go to the menu Data > Import External Data > Import Data.
Choose your CSV file.
Original data type: choose Fixed width, then Next.
It will automatically delimit your columns. Then you may check the split columns in the Data preview panel.
Then Finish & see.
Note: you may also go with Delimited as the Original data type.
In that case, you need to key in your delimiting character.
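If you want to drive the same import from VBA rather than through the menus, the macro-recorder equivalent of that wizard looks roughly like this (a sketch; the path, target cell and the semicolon delimiter are placeholders to adapt):
With ActiveSheet.QueryTables.Add(Connection:="TEXT;C:\temp\Test.csv", _
        Destination:=ActiveSheet.Range("A1"))
    ' Delimited import, splitting on ";" (switch the property for "," etc.)
    .TextFileParseType = xlDelimited
    .TextFileSemicolonDelimiter = True
    .Refresh BackgroundQuery:=False
End With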
HTH!
Have you tried the Import Text function?
I had the same issue: I was not able to open a CSV file in Excel. I found a solution that worked for me in this question: Opening a file in excel via Workbooks.OpenText
That question helped me figure out code that works for me. The code looks more or less like this:
Private Sub OpenCSVFile(filename As String)
    Dim datasourceFilename As String
    Dim currentPath As String
    datasourceFilename = "\" & filename & ".csv"
    currentPath = ActiveWorkbook.Path
    Workbooks.OpenText Filename:=currentPath & datasourceFilename, _
        Origin:=xlWindows, _
        StartRow:=1, _
        DataType:=xlDelimited, _
        TextQualifier:=xlDoubleQuote, _
        ConsecutiveDelimiter:=False, _
        Tab:=False, _
        Semicolon:=False, _
        Comma:=True, _
        Space:=False, _
        Other:=False, _
        FieldInfo:=Array(Array(1, 1), Array(2, 1)), _
        DecimalSeparator:=".", _
        ThousandsSeparator:=",", _
        TrailingMinusNumbers:=True
End Sub
At least, it helped me to know about lots of parameters I can use with Workbooks.OpenText method.
Sometimes none of the Workbooks.Open solutions work, no matter how many parameters are set.
For me, the fastest solution was to change the list separator in the Region & Language settings:
Region window / Additional settings... / List separator.
If the CSV is not opening properly, you probably have "," set as the list separator. Just change it to ";" and everything is solved.
It is simply the easiest way when "everything is against you". :P
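If changing the Windows setting is not an option (for example on a locked-down machine), the Workbooks.OpenText route from the answer above, told explicitly to split on ";", may also do the trick. An untested sketch:
Workbooks.OpenText Filename:=ActiveWorkbook.Path & "\Test.csv", _
    DataType:=xlDelimited, Semicolon:=True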
I just solved (my) problem by setting the Local parameter in the Workbooks.Open method like this:
Set xtrct_wb = Workbooks.Open(filePath, Local:=True)
Every piece of information then ends up in its proper column. Hope it works for you as well.
Related
VBA newbie here.
I have a VBA macro which is designed to create a data table on a named range, paste the data table as values and then export the data table to a .txt file. The problem I have is that each time I run the macro it takes significantly longer to run than the previous time. If I restart Excel, however, the run time "resets" and becomes low again. Once or twice I have even received an error message that Excel has run out of resources. Any help would be greatly appreciated!
Here is the macro:
Sub PR_Calculate()
'
' Total Macro
'
Application.ScreenUpdating = False
Range("Output").Clear
Range("CurrentOutput").Table ColumnInput:=Range("CurrentOutput").Cells(1, 1) 'apply data table to required range
Range("Output").Font.Size = 8
Range("Output").Font.Name = "Segoe UI"
Application.Calculation = xlCalculationAutomatic
Application.Calculation = xlCalculationSemiautomatic
Range("Output").Copy
Range("Output").PasteSpecial xlPasteValues
Application.CutCopyMode = False
Dim outputPath1 As String
Dim outputPath2 As String
outputPath1 = ActiveWorkbook.Worksheets("Run Setup").Range("OutputPath") & Range("CurrentRunParameters").Cells(2, 1).Value & "." & Range("CurrentRunParameters").Cells(2, 2).Value & ".txt"
outputPath2 = ActiveWorkbook.Worksheets("Run Setup").Range("OutputPath") & Range("CurrentRunParameters").Cells(2, 1).Value & "." & Range("CurrentRunParameters").Cells(2, 2).Value & ".Headings.txt"
Call ExportRange(ActiveWorkbook.Worksheets("Policy Results").Range("FileSaveRange"), outputPath1, ",") 'call function to export results to .txt file
Call ExportRange(ActiveWorkbook.Worksheets("Policy Results").Range("HeadingSaveRange"), outputPath2, ",") 'call function to export results to .txt file
End Sub
Function ExportRange(WhatRange As Range, _
Where As String, Delimiter As String) As String
Dim HoldRow As Long 'test for new row variable
HoldRow = WhatRange.Row
Dim c As Range
'loop through range variable
For Each c In WhatRange
If HoldRow <> c.Row Then
'add linebreak and remove extra delimeter
ExportRange = Left(ExportRange, Len(ExportRange) - 1) _
& vbCrLf & c.Text & Delimiter
HoldRow = c.Row
Else
ExportRange = ExportRange & c.Text & Delimiter
End If
Next c
'Trim extra delimiter
ExportRange = Left(ExportRange, Len(ExportRange) - 1)
'Kill the file if it already exists
If Len(Dir(Where)) > 0 Then
Kill Where
End If
Open Where For Append As #1 'write the new file
Print #1, ExportRange
Close #1
End Function
I've tried removing sections of the code piece by piece but it always seems to slow down after consecutive runs.
So, you have a function ExportRange declared As String, but you call it as a subroutine while using the function's own name as the accumulator variable inside it, whose value could grow larger and larger each time it is run. I would not use the function name as a local variable for itself; use a separate Dim'd String instead. If you need a global variable for it, declare it outside the function.
Something like this:
Dim MyExportRange As String
Sub ExportRange(WhatRange As Range, _
Where As String, Delimiter As String)
Dim HoldRow As Long 'test for new row variable
HoldRow = WhatRange.Row
Dim c As Range
MyExportRange = ""
'loop through range variable
For Each c In WhatRange
If HoldRow <> c.Row Then
'add linebreak and remove extra delimeter
MyExportRange = Left(MyExportRange, Len(MyExportRange) - 1) _
& vbCrLf & c.Text & Delimiter
HoldRow = c.Row
Else
MyExportRange = MyExportRange & c.Text & Delimiter
End If
Next c
'Trim extra delimiter
MyExportRange = Left(MyExportRange, Len(MyExportRange) - 1)
'Kill the file if it already exists
If Len(Dir(Where)) > 0 Then
Kill Where
End If
Open Where For Append As #1 'write the new file
Print #1, MyExportRange
Close #1
End Sub
Looping cell-by-cell through a range can be slow, so you could try reading the whole range into an array and then writing the file from that:
Sub tester()
ExportRange ActiveSheet.Range("A1").CurrentRegion, "C:\Temp\Test56.txt", "," '
End Sub
Sub ExportRange(WhatRange As Range, Where As String, Delimiter As String)
Dim arr, r As Long, c As Long, sep As String, s As String, ff
If Len(Dir(Where)) > 0 Then Kill Where 'kill file if already exists
ff = FreeFile
Open Where For Output As #ff 'not appending...
If WhatRange.Cells.Count > 1 Then
arr = WhatRange.Value
For r = 1 To UBound(arr, 1)
s = ""
sep = ""
For c = 1 To UBound(arr, 2)
s = s & sep & arr(r, c)
sep = Delimiter
Next c
Print #ff, s
Next r
Else
Print #ff, WhatRange.Value 'only one cell
End If
Close #ff
End Sub
Your support is really appreciated!
I am receiving a .dat file from a measuring tool, which turns out to be hard to get into Excel.
I would also like to do it without Power Query.
I do this in steps:
Step 1: convert the .dat file to "csv/txt" by removing duplicate spaces and replacing the spaces with ";", and also replacing "." with "," (a sketch of this conversion follows the sample rows below).
I would like to keep this format, as several other tools tend to use a similar one.
From this I thought it would be fairly OK to import it, however...
First row of 11000 rows of .dat file:
1 1 -0.4200 -0.0550 0.1420 173 174 181 56.3 55.5 59.3 87 84 95 0.778 0 0 0
First row of the converted file (all rows below look good as well):
1;1;-0,4260;-0,1500;0,0990;171;168;176;55,5;53,8;57,6;96;83;82;4,794;0;0;0
If I import this file with Power Query, it seems OK.
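For reference, a rough sketch of how step 1 can be scripted (the file names are placeholders and the exact cleanup rules are assumptions taken from the description above):
Sub ConvertDatToSemicolonCsv()
    Dim fso As Object, tsIn As Object, tsOut As Object, line As String
    Set fso = CreateObject("Scripting.FileSystemObject")
    Set tsIn = fso.OpenTextFile("C:\temp\measurement.dat", 1)       ' 1 = ForReading
    Set tsOut = fso.CreateTextFile("C:\temp\measurement.csv", True) ' overwrite if it exists
    Do While Not tsIn.AtEndOfStream
        line = Trim(tsIn.ReadLine)
        Do While InStr(line, "  ") > 0        ' collapse duplicate spaces
            line = Replace(line, "  ", " ")
        Loop
        line = Replace(line, " ", ";")        ' space -> ";" field separator
        line = Replace(line, ".", ",")        ' "." -> "," decimal separator
        tsOut.WriteLine line
    Loop
    tsIn.Close
    tsOut.Close
End Sub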
Step 2:
When importing it with the code below, the following occurs at line 660.
From the txt file:
1;660;-1,0210;-0,0340;0,0470;169;164;176;54,6;51,2;57,2;15;96;63;0,782;0;0;0
In Excel, debugging shows that the cell's format is "Number" and not "General" as for the other numbers.
This seems to occur now and then, typically when the number goes beyond -1,xx.
The code was found online and is fairly quick.
I suspect that something happens when putting the two-dimensional variant array into the sheet
Dim Data As Variant 'Array for the file values
.
.
.
.
With Sheets(parSheetName)
'Delete any old content
.cells.ClearContents
'A range gets the same dimensions as the array
'and the array values are inserted in one operation.
.cells(4, 1).Resize(UBound(Data, 1), UBound(Data, 2)) = Data
End With
End If
Option Explicit
'**************************************************************
' Imports CSV to sheet, following the generated numbers will be placed in a table.
'**************************************************************
Public Sub copyDataFromCsvFileToSheet(parFileName As String, _
parDelimiter As String, parSheetName As String)
Dim Data As Variant 'Array for the file values
Dim I As Long
Dim J As Long
Dim prt As String
'Function call - the file is read into the array
Data = getDataFromFile(parFileName, parDelimiter)
'If the array isn't empty it is inserted into
'the sheet in one swift operation.
If Not isArrayEmpty(Data) Then
'If you want to operate directly on the array,
'you can leave out the following lines.
ActiveWorkbook.Worksheets.Add(After:=Sheets(Sheets.Count)).Name = parSheetName
'For I = 1 To 1000 'UBound(Data, 1)
'For J = 1 To 18 'UBound(Data, 2)
''prt = Data(I, J)
''Debug.Print prt
''ThisWorkbook.Worksheets(parSheetName).cells(I, J) = Data(I, J)
'Next J
'Next I
'Debug.Print "done"
'End If
With Sheets(parSheetName)
'Delete any old content
.cells.ClearContents
'A range gets the same dimensions as the array
'and the array values are inserted in one operation.
.cells(4, 1).Resize(UBound(Data, 1), UBound(Data, 2)) = Data
End With
End If
'Call sbCreatTable(parSheetName)
End Sub
'**************************************************************
Private Function isArrayEmpty(parArray As Variant) As Boolean
'Returns True if not an array, or a dynamic array
'that hasn't been initialised (ReDim) or
'has been deleted (Erase).
If IsArray(parArray) = False Then isArrayEmpty = True
On Error Resume Next
If UBound(parArray) < LBound(parArray) Then
isArrayEmpty = True
Exit Function
Else
isArrayEmpty = False
End If
End Function
Private Function getDataFromFile(parFileName As String, _
parDelimiter As String, _
Optional parExcludeCharacter As String = "") As Variant
'parFileName is the delimited file (csv, txt ...)
'parDelimiter is the separator, e.g. semicolon.
'The function returns an empty array, if the file
'is empty or cannot be opened.
'Number of columns is based on the line with most
'columns and not the first line.
'parExcludeCharacter: Some csv files have strings in
'quotations marks ("ABC"), and if parExcludeCharacter = """"
'quotation marks are removed.
Dim locLinesList() As Variant 'Array
Dim locData As Variant 'Array
Dim I As Long 'Counter
Dim J As Long 'Counter
Dim locNumRows As Long 'Nb of rows
Dim locNumCols As Long 'Nb of columns
Dim fso As Variant 'File system object
Dim ts As Variant 'File variable
Const REDIM_STEP = 10000 'Constant
'If this fails you need to reference Microsoft Scripting Runtime.
'You select this in "Tools" (VBA editor menu).
Set fso = CreateObject("Scripting.FileSystemObject")
On Error GoTo error_open_file
'Sets ts = the file
Set ts = fso.OpenTextFile(parFileName)
On Error GoTo unhandled_error
'Initialise the array
ReDim locLinesList(1 To 1) As Variant
I = 0
'Loops through the file, counts the number of lines (rows)
'and finds the highest number of columns.
Do While Not ts.AtEndOfStream
'If the row number Mod 10000 = 0
'we redimension the array.
If I Mod REDIM_STEP = 0 Then
ReDim Preserve locLinesList _
(1 To UBound(locLinesList, 1) + REDIM_STEP) As Variant
End If
locLinesList(I + 1) = Split(ts.ReadLine, parDelimiter)
J = UBound(locLinesList(I + 1), 1) 'Nb of columns in present row
'If the number of columns is the highest so far,
'the new number is saved.
If locNumCols < J Then locNumCols = J
I = I + 1
Loop
ts.Close 'Close file
locNumRows = I
'If number of rows is zero
If locNumRows = 0 Then Exit Function
ReDim locData(1 To locNumRows, 1 To locNumCols + 1) As Variant
'Copies the file values into an array.
'If parExcludeCharacter has a value,
'the characters are removed.
If parExcludeCharacter <> "" Then
For I = 1 To locNumRows
For J = 0 To UBound(locLinesList(I), 1)
If Left(locLinesList(I)(J), 1) = parExcludeCharacter Then
If Right(locLinesList(I)(J), 1) = parExcludeCharacter Then
locLinesList(I)(J) = _
Mid(locLinesList(I)(J), 2, Len(locLinesList(I)(J)) - 2)
Else
locLinesList(I)(J) = _
Right(locLinesList(I)(J), Len(locLinesList(I)(J)) - 1)
End If
ElseIf Right(locLinesList(I)(J), 1) = parExcludeCharacter Then
locLinesList(I)(J) = _
Left(locLinesList(I)(J), Len(locLinesList(I)(J)) - 1)
End If
locData(I, J + 1) = locLinesList(I)(J)
Next J
Next I
Else
For I = 1 To locNumRows
For J = 0 To UBound(locLinesList(I), 1)
locData(I, J + 1) = locLinesList(I)(J)
Next J
Next I
End If
getDataFromFile = locData
Exit Function
error_open_file: 'Returns empty Variant
unhandled_error: 'Returns empty Variant
End Function
Because, as mentioned, several measuring tools produce similar files, Power Query is unsuited here, and the old way of doing it gives better control.
Solution:
Convert the value to Decimal when building the array:
CDec(locLinesList(I)(J))
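In context that means changing the copy loop in getDataFromFile roughly like this (a sketch; the IsNumeric guard is my addition so that non-numeric fields are still copied as text):
For I = 1 To locNumRows
    For J = 0 To UBound(locLinesList(I), 1)
        If IsNumeric(locLinesList(I)(J)) Then
            ' CDec converts the text using the current locale's decimal
            ' separator, so "0,0990" should arrive as a real number.
            locData(I, J + 1) = CDec(locLinesList(I)(J))
        Else
            locData(I, J + 1) = locLinesList(I)(J)
        End If
    Next J
Next I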
Thanks for your response!
VBA newbie here: I have a large CSV file. I would like to read only 5000 lines from it and import them into Excel using VBA.
I tried the following code. It opens the file, but I can't seem to find a way to import only 5000 rows.
Sub importcsvfile()
Dim WS As Worksheet, strFile As String
Set WS = ActiveWorkbook.Sheets("sheet1") 'set to current worksheet name
'Open .csv file'
strFile = Application.GetOpenFilename("Text Files (*.csv),*.csv", , "Please select text file...")
With WS.QueryTables.Add(Connection:="TEXT;" & strFile, Destination:=WS.Range("A1"))
.TextFileParseType = xlDelimited
.TextFileCommaDelimiter = True
.Refresh
End With
' delete the querytable if there is one
On Error GoTo nothingtodelete
Sheets("Data").QueryTables(1).SaveData = False
Sheets("Data").QueryTables.Item(1).Delete
nothingtodelete:
End Sub
Code reference - https://stackoverflow.com/questions/12197274/is-there-a-way-to-import-data-from-csv-to-active-excel-sheet
Also, how do I save this file as .xlsm using VBA code? Any help is appreciated!
I do not know whether the QueryTable method can be limited to a specific number of rows, but please try this piece of code instead. It should work and be fast enough:
Private Sub importcsvfile()
Dim WS As Worksheet, strFile As String, arrCSV, cols, dataCSV
Dim i As Long, nL As Long, c As Long, nrRows As Long, strAll As String
Dim st As Long, lEnd As Long
Set WS = ActiveSheet
nrRows = 5000
'Open .csv file'
strFile = Application.GetOpenFilename("Text Files (*.csv),*.csv", , "Please select text file...")
If strFile = "False" Then Exit Sub
arrCSV = Split(CreateObject("Scripting.FileSystemObject").OpenTextFile(strFile, 1).ReadAll, vbLf) 'more than three minutes for this line...
cols = Split(arrCSV(0), ",")
ReDim dataCSV(0 To nrRows - 1, 0 To UBound(cols))
nL = 0
For i = 0 To nrRows - 1
st = InStr(1, arrCSV(i), """"): lEnd = InStr(st + 1, arrCSV(i), """")
If st > 0 Then
arrCSV(i) = Replace(arrCSV(i), Mid(arrCSV(i), st, lEnd - st + 1), _
Replace(Mid(arrCSV(i), st + 1, lEnd - st - 1), ",", "/"))
End If
cols = Split(arrCSV(i), ",")
For c = 0 To UBound(cols)
dataCSV(nL, c) = cols(c)
Next
nL = nL + 1
Next i
WS.cells.Clear
WS.Range("A1").Resize(nrRows, UBound(dataCSV, 2) + 1).Value = dataCSV
End Sub
Edited:
Please check the next code, which does not read the whole file as one string at once. I hadn't imagined that the file was really that huge... This version is very fast; it takes seconds. I will also leave the first version in place, purely for learning purposes, which should be the main purpose of our community, I think:
Private Sub importcsvfileRLines()
Dim WS As Worksheet, strFile As String, arrCSV, cols, dataCSV
Dim i As Long, nL As Long, c As Long, nrRows As Long, strAll As String
Dim st As Long, lEnd As Long, myCSV As Object, txtLine As String
Set WS = ActiveSheet 'use here the sheet you need
nrRows = 5000 'set here the number of rows to be returned
'Open .csv file'
strFile = Application.GetOpenFilename("Text Files (*.csv),*.csv", , "Please select text file...")
If strFile = "False" Then Exit Sub 'in case of pressing 'Cancel' button
nL = 1 'initialise the first (array) row to be filled
ReDim dataCSV(1 To nrRows, 1 To 11) 'redim the necessary array up front (for speed); 11 columns assumed for this particular file
Set myCSV = CreateObject("Scripting.FileSystemObject").OpenTextFile(strFile, 1)
Do While myCSV.AtEndOfStream <> True 'iterate over all existing lines
txtLine = myCSV.ReadLine 'put the text line in a variable
'solve the string problem, having comma delimiter between the double quotes:
st = InStr(1, txtLine, """") 'position of the first double quotes character
If st > 0 Then
lEnd = InStr(st + 1, txtLine, """") 'position of the last double quotes character
txtLine = Replace(txtLine, Mid(txtLine, st, lEnd - st + 1), _
Replace(Mid(txtLine, st + 1, lEnd - st - 1), ",", "/"))
End If
cols = Split(txtLine, ",") 'split the text on comma separator
For c = 0 To UBound(cols)
dataCSV(nL, c + 1) = cols(c) 'fill the array nL row
Next
nL = nL + 1
If nL = nrRows + 1 Then Exit Do 'if max set number of rows has been reached
Loop
myCSV.Close
WS.cells.Clear 'clear the old data
'drop the array value at once:
WS.Range("A1").Resize(nrRows, UBound(dataCSV, 2)).Value = dataCSV
MsgBox "Ready...", vbInformation, "Job finished"
End Sub
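As for the second part of the question, saving the result as .xlsm (which the code above does not touch), something along these lines should work; the path below is only a placeholder:
ThisWorkbook.SaveAs Filename:="C:\temp\ImportedData.xlsm", _
    FileFormat:=xlOpenXMLWorkbookMacroEnabled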
I have blocks of data (below) in an Excel sheet and I want to put them in a plain .txt file, structured and comma-delimited in CSV style. Here's the data:
dn: uid=7097202
carLicense: 113363427
cn: Jean Arnofski
title: LBM356226
st: 16777212
(Next block, separated by a blank row/line)
dn: uid=8178194
carLicense: 154052876
cn: Marie Gaulimerg
title: LBM356227
st: 16777219
etc...
The code I am using is this:
Option Explicit
Sub LDIFParaCSV()
Dim i, x, intHandle As Integer
Dim lngNrLinhs As Long
Dim rgnAlcance As Range
Dim objFolhaExcel As Worksheet
Dim strFich, strPChar As String
Dim arrItemsSep
Const cntEscrvFich = "csv_ldif.txt"
intHandle = FreeFile
Range("A2").Select
Set objFolhaExcel = ThisWorkbook.Sheets("Sheet1")
Set rgnAlcance = objFolhaExcel.UsedRange
lngNrLinhs = rgnAlcance.Rows.Count + rgnAlcance.Row
strFich = "d:\" & cntEscrvFich
Open strFich For Output Access Write As #intHandle
For i = 2 To lngNrLinhs + 1
If Not IsEmpty(Cells(i, 1)) Then
arrItemsSep = Split(Cells(i, 1), ":")
Print #intHandle, arrItemsSep(1)
ActiveCell.Offset(1, 0).Select
End If
Next
Close #intHandle
End Sub
What I am trying to achieve is to put each block of data horizontally in the txt file on a single line (e.g. uid=7097202, 113363427, Jean Arnofski, LBM356226, 16777212), and then the next block in the same manner on the next line, and so on...
I am having great difficulty achieving this... sorry, I'm not much of a programmer, I think.
Thanks
I have just added some lines to your code:
Option Explicit
Sub LDIFParaCSV()
Dim i, x, intHandle As Integer
Dim lngNrLinhs As Long
Dim rgnAlcance As Range
Dim objFolhaExcel As Worksheet
Dim strFich, strPChar As String
Dim arrItemsSep
Dim printStr As String
Const csvSeperator = ","
Const cntEscrvFich = "csv_ldif.txt"
printStr = ""
intHandle = FreeFile
Range("A2").Select
Set objFolhaExcel = ThisWorkbook.Sheets("Tabelle1")
Set rgnAlcance = objFolhaExcel.UsedRange
lngNrLinhs = rgnAlcance.Rows.Count + rgnAlcance.Row
strFich = "D:\" & cntEscrvFich
Open strFich For Output Access Write As #intHandle
For i = 2 To lngNrLinhs + 1
If Not IsEmpty(Cells(i, 1)) Then
arrItemsSep = Split(Cells(i, 1), ":")
'collect string elements
printStr = printStr & arrItemsSep(1) & csvSeperator
ActiveCell.Offset(1, 0).Select
Else
If printStr <> "" Then ' empty lines at end of range?
printStr = Left(printStr, Len(printStr) - Len(csvSeperator)) ' remove trailing separator
Print #intHandle, Trim(printStr) ' trim necessary?
printStr = ""
End If
End If
Next
Close #intHandle
End Sub
I have some VBA code that loops through all the cells in column E, looks for a match in a text file, and enters the first word from the matched line of the txt file into the sheet. This works, but when I loop through over 10,000 rows it takes 3 hours to complete.
I'm just checking whether there is a more efficient way of doing this, to speed up the process. Any help would be much appreciated.
Sub SearchTextFile()
Dim x
Dim hex, reg As String
Dim strsearch As String
x = Sheet9.Range("Q1").Value
Dim hexa As String
Const strFileName = "T:\Hex\Hex_Codes.txt"
hex = Sheet9.Range("P1").Value
reg = Sheet1.Range("E" & x).Value
If Right(reg, 1) = "-" Then GoTo err
strsearch = hex
Dim strLine As String
Dim f As Integer
Dim lngLine As Long
Dim blnFound As Boolean
f = FreeFile
Open strFileName For Input As #f
Do While Not EOF(f)
lngLine = lngLine + 1
Line Input #f, strLine
If InStr(1, strLine, strsearch, vbBinaryCompare) > 0 Then
Text = Text & strLine
On Error GoTo err
Sheet1.Range("L" & x).Value = Format(Split(Text, ",")(0), "#")
blnFound = True
Exit Do
End If
Loop
Close #f
If Not blnFound Then
End If
err:
End Sub
Sub searchReg()
Application.ScreenUpdating = False
Application.EnableEvents = False
Dim lr1
lr1 = Sheet1.Range("A" & Rows.Count).End(xlUp).Row
Dim y
For y = 2 To lr1
Sheet9.Range("P1").Value = Sheet1.Range("E" & y).Value
Sheet9.Range("Q1").Value = y
Call SearchTextFile
Next y
Call verify_text_formulas
Application.ScreenUpdating = True
Application.EnableEvents = True
End Sub
I've used a single static read of the text file to avoid repetitive open/read/close calls.
I've removed the use of Sheet9 as a holding/transfer place for values by passing the important and necessary values into the SearchTextFile helper as parameters/arguments.
You provided no information on verify_text_formulas, so it is currently commented out.
hex and reg seemed to be the same thing; hexa was never used; Text was never declared and seemed redundant. I've reduced the use of extraneous variables.
Option Explicit
Const cFileName As String = "T:\Hex\Hex_Codes.txt"
Sub SearchTextFile(Optional hex As String, _
Optional ln As Long = -1, _
Optional closeOff As Boolean = False)
Static txt As String
Dim ff As Integer, p As Long, ps As Long, str As String
Dim blnFound As Boolean
If txt = vbNullString Then
'Debug.Print "new read"
ff = FreeFile(1)
Open cFileName For Binary As #ff
txt = Space$(LOF(ff))
Get #ff, , txt
Close #ff
txt = vbLf & txt
End If
If closeOff Then
'Debug.Print "closed"
txt = vbNullString
Else
p = InStr(1, txt, hex, vbBinaryCompare)
If p > 0 Then
ps = InStrRev(txt, vbLf, p, vbBinaryCompare)
str = Chr(39) & Split(Mid(txt, ps + 1, p - ps + 1), ",")(0)
Sheet1.Cells(ln, "L") = str
blnFound = True
End If
End If
If Not blnFound Then
End If
err:
End Sub
Sub searchReg()
Dim y As Long
debug.print timer
'Application.ScreenUpdating = False
'Application.EnableEvents = False
For y = 2 To Sheet1.Range("A" & Rows.Count).End(xlUp).Row
SearchTextFile hex:=Sheet1.Cells(y, "E").Value, ln:=y
Next y
SearchTextFile closeOff:=True
'Call verify_text_formulas
Application.ScreenUpdating = True
Application.EnableEvents = True
debug.print timer
End Sub
Post back some timer counts for various record sets if you have a chance. This was tested on a small (30 rows) .TXT file but I would be interested in the time it takes to process larger files.