Esteiner
I am scrubbing values of a field in an Access 2003 table and updating the
results in the same table.
The routine runs OK but the size of the database goes to 600+ MB. I must run
the compact and repair database operation each time I run the routine. The
amount of data to process will continue to increase, so I would like to make
this routine as efficient as possible. I currently process about 200K
records, but expect this to grow to 1 million+
I assume that this is caused by the many read/write cycles to the hard disk.
Is there a way to operate on the table/recordset in RAM before writing to
disk? Any help/suggestions to make this more efficient would be appreciated.
Below is the code that runs the routine:
Private Sub cmdSplitMPN_Click()
' Scans every ManfPartNum in pull_inv_BRE for its first numeric character
' and writes the substring from that digit onward into CorePartNum.
' Fixes vs. the original:
'   * "..Close" (double dot) was a compile error; rs is closed once in sExit.
'   * blHasNum is now reset BEFORE the scan loop - resetting it inside the
'     loop meant a zero-length part number inherited the previous record's
'     flag and could write a stale strBasenum.
'   * db is explicitly declared; Null ManfPartNum no longer raises error 94.
'   * Unused locals (intRecordNum, strPrefix) removed.
On Error GoTo E_Handle
Dim db As Database          ' Current database
Dim rs As Recordset         ' Recordset over the table being scrubbed
Dim strTableName As String  ' Name of table where export records originate
Dim strSQL As String        ' SQL string
Dim strMPN As String        ' Current manufacturer part number
Dim I As Integer            ' Scan position within strMPN
Dim strBasenum As String    ' Portion of strMPN from the first digit onward
Dim blHasNum As Boolean     ' True when strMPN contains at least one digit
Set db = DBEngine(0)(0)
' Table from where the export records originate
strTableName = "pull_inv_BRE"
strSQL = "SELECT ManfPartNum, CorePartNum FROM " & strTableName
Set rs = db.OpenRecordset(strSQL)
With rs
While Not (.BOF Or .EOF)
strMPN = !ManfPartNum & ""  ' & "" coerces Null to "" instead of erroring
blHasNum = False            ' must be reset once per record, not per character
For I = 1 To Len(strMPN)
If IsNumeric(Mid$(strMPN, I, 1)) Then
' first digit found - I now marks where the numeric part begins
blHasNum = True
Exit For
End If
Next I
If blHasNum Then
strBasenum = Mid$(strMPN, I)    ' everything from the first digit to the end
.Edit
!CorePartNum = strBasenum
.Update
End If
.MoveNext
Wend
End With
sExit:
On Error Resume Next
rs.Close
Set rs = Nothing
Set db = Nothing
Reset
Exit Sub
E_Handle:
MsgBox Err.Description, vbOKOnly + vbCritical, "Error: " & Err.Number
Resume sExit
End Sub
Thank you for your help!!!
-Charlie
results in the same table.
The routine runs OK but the size of the database goes to 600+ MB. I must run
the compact and repair database operation each time I run the routine. The
amount of data to process will continue to increase, so I would like to make
this routine as efficient as possible. I currently process about 200K
records, but expect this to grow to 1 million+
I assume that this is caused by the many read/write cycles to the hard disk.
Is there a way to operate on the table/recordset in RAM before writing to
disk? Any help/suggestions to make this more efficient would be appreciated.
Below is the code that runs the routine:
Private Sub cmdSplitMPN_Click()
' Scans every ManfPartNum in pull_inv_BRE for its first numeric character
' and writes the substring from that digit onward into CorePartNum.
' Fixes vs. the original:
'   * "..Close" (double dot) was a compile error; rs is closed once in sExit.
'   * blHasNum is now reset BEFORE the scan loop - resetting it inside the
'     loop meant a zero-length part number inherited the previous record's
'     flag and could write a stale strBasenum.
'   * db is explicitly declared; Null ManfPartNum no longer raises error 94.
'   * Unused locals (intRecordNum, strPrefix) removed.
On Error GoTo E_Handle
Dim db As Database          ' Current database
Dim rs As Recordset         ' Recordset over the table being scrubbed
Dim strTableName As String  ' Name of table where export records originate
Dim strSQL As String        ' SQL string
Dim strMPN As String        ' Current manufacturer part number
Dim I As Integer            ' Scan position within strMPN
Dim strBasenum As String    ' Portion of strMPN from the first digit onward
Dim blHasNum As Boolean     ' True when strMPN contains at least one digit
Set db = DBEngine(0)(0)
' Table from where the export records originate
strTableName = "pull_inv_BRE"
strSQL = "SELECT ManfPartNum, CorePartNum FROM " & strTableName
Set rs = db.OpenRecordset(strSQL)
With rs
While Not (.BOF Or .EOF)
strMPN = !ManfPartNum & ""  ' & "" coerces Null to "" instead of erroring
blHasNum = False            ' must be reset once per record, not per character
For I = 1 To Len(strMPN)
If IsNumeric(Mid$(strMPN, I, 1)) Then
' first digit found - I now marks where the numeric part begins
blHasNum = True
Exit For
End If
Next I
If blHasNum Then
strBasenum = Mid$(strMPN, I)    ' everything from the first digit to the end
.Edit
!CorePartNum = strBasenum
.Update
End If
.MoveNext
Wend
End With
sExit:
On Error Resume Next
rs.Close
Set rs = Nothing
Set db = Nothing
Reset
Exit Sub
E_Handle:
MsgBox Err.Description, vbOKOnly + vbCritical, "Error: " & Err.Number
Resume sExit
End Sub
Thank you for your help!!!
-Charlie